From 95e43c8e027a3bc6871565b7e328f8f0ca39b728 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Wed, 18 Jun 2025 14:36:04 +0200
Subject: [PATCH 01/15] Rename branch; create AggResponse and AggResult;
 create IpRangeAggregation with type-safe and regular defs in
 ElasticAggregation (descriptions pending)

---
 .../elasticsearch/ElasticAggregation.scala    | 10 ++++
 .../aggregation/Aggregations.scala            | 53 +++++++++++++++++++
 .../response/AggregationResponse.scala        | 25 +++++++++
 .../result/AggregationResult.scala            | 11 ++++
 4 files changed, 99 insertions(+)

diff --git a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
index 22c2b08b0..949d7abe5 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
@@ -160,6 +160,16 @@ object ElasticAggregation {
   final def filterAggregation(name: String, query: ElasticQuery[_]): FilterAggregation =
     Filter(name = name, query = query, subAggregations = Chunk.empty)
 
+  def ipRangeAggregation(
+    name: String,
+    field: Field[_, String],
+    ranges: Chunk[IpRange.IpRangeBound]
+  ): IpRangeAggregation =
+    IpRange(name = name, field = field.toString, ranges = ranges, keyed = None, subAggregations = Chunk.empty)
+
+  def ipRangeAggregation(name: String, field: String, ranges: Chunk[IpRange.IpRangeBound]): IpRangeAggregation =
+    IpRange(name = name, field = field, ranges = ranges, keyed = None, subAggregations = Chunk.empty)
+
   /**
    * Constructs a type-safe instance of [[zio.elasticsearch.aggregation.MaxAggregation]] using the specified parameters.
   *
diff --git a/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala b/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala
index ec194dc27..89afa7d52 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala
@@ -205,6 +205,59 @@ private[elasticsearch] final case class Filter(
   }
 }
 
+sealed trait IpRangeAggregation extends SingleElasticAggregation with WithAgg with WithSubAgg[IpRangeAggregation]
+
+final case class IpRange(
+  name: String,
+  field: String,
+  ranges: Chunk[IpRange.IpRangeBound],
+  keyed: Option[Boolean],
+  subAggregations: Chunk[SingleElasticAggregation]
+) extends IpRangeAggregation { self =>
+
+  def keyed(value: Boolean): IpRangeAggregation =
+    self.copy(keyed = Some(value))
+
+  def withAgg(aggregation: SingleElasticAggregation): MultipleAggregations =
+    multipleAggregations.aggregations(self, aggregation)
+
+  def withSubAgg(aggregation: SingleElasticAggregation): IpRangeAggregation =
+    self.copy(subAggregations = aggregation +: subAggregations)
+
+  private[elasticsearch] def toJson: Json = {
+    val rangesJson = ranges.map(_.toJson)
+
+    val keyedJson   = keyed.fold(Obj())(k => Obj("keyed" -> k.toJson))
+    val subAggsJson = subAggregations.nonEmptyOrElse(Obj())(sa => Obj("aggs" -> sa.map(_.toJson).reduce(_ merge _)))
+
+    Obj(
+      name -> (
+        Obj("ip_range" -> (Obj("field" -> field.toJson, "ranges" -> Arr(rangesJson)) merge keyedJson)) merge subAggsJson
+      )
+    )
+  }
+}
+
+object IpRange {
+
+  final case class IpRangeBound(
+    from: Option[String] = None,
+    to: Option[String] = None,
+    mask: Option[String] = None,
+    key: Option[String] = None
+  ) {
+    def toJson: Json = {
+      val baseFields = Chunk.empty[(String, Json)] ++
+        from.map("from" -> _.toJson) ++
to.map("to" -> _.toJson) ++ + mask.map("mask" -> _.toJson) ++ + key.map("key" -> _.toJson) + + Obj(baseFields: _*) + } + } +} + sealed trait MaxAggregation extends SingleElasticAggregation with HasMissing[MaxAggregation] with WithAgg private[elasticsearch] final case class Max(name: String, field: String, missing: Option[Double]) diff --git a/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala b/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala index 12497006e..d31b2758a 100644 --- a/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala +++ b/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala @@ -157,6 +157,8 @@ private[elasticsearch] case class BucketDecoder(fields: Chunk[(String, Json)]) e ) case str if str.contains("filter#") => Some(field -> data.unsafeAs[FilterAggregationResponse](FilterAggregationResponse.decoder)) + case str if str.contains("ip_range#") => + Some(field -> data.unsafeAs[IpRangeAggregationResponse](IpRangeAggregationResponse.decoder)) case str if str.contains("max#") => Some(field -> MaxAggregationResponse(value = objFields("value").unsafeAs[Double])) case str if str.contains("min#") => @@ -202,6 +204,8 @@ private[elasticsearch] case class BucketDecoder(fields: Chunk[(String, Json)]) e (field.split("#")(1), data.asInstanceOf[ExtendedStatsAggregationResponse]) case str if str.contains("filter#") => (field.split("#")(1), data.asInstanceOf[FilterAggregationResponse]) + case str if str.contains("ip_range#") => + (field.split("#")(1), data.asInstanceOf[IpRangeAggregationResponse]) case str if str.contains("max#") => (field.split("#")(1), data.asInstanceOf[MaxAggregationResponse]) case str if str.contains("min#") => @@ -285,6 +289,27 @@ private[elasticsearch] sealed trait JsonDecoderOps { } } +private[elasticsearch] final case class IpRangeAggregationBucket( + key: String, + from: Option[String], + to: Option[String], + @jsonField("doc_count") + docCount: Int +) extends AggregationBucket + +private[elasticsearch] object IpRangeAggregationBucket { + implicit val decoder: JsonDecoder[IpRangeAggregationBucket] = DeriveJsonDecoder.gen[IpRangeAggregationBucket] +} + +private[elasticsearch] final case class IpRangeAggregationResponse( + buckets: Chunk[IpRangeAggregationBucket] +) extends AggregationResponse + +private[elasticsearch] object IpRangeAggregationResponse { + implicit val decoder: JsonDecoder[IpRangeAggregationResponse] = + DeriveJsonDecoder.gen[IpRangeAggregationResponse] +} + private[elasticsearch] final case class MaxAggregationResponse(value: Double) extends AggregationResponse private[elasticsearch] object MaxAggregationResponse { diff --git a/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala b/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala index 0f6037cf4..e9bdb6222 100644 --- a/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala +++ b/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala @@ -53,6 +53,17 @@ final case class FilterAggregationResult private[elasticsearch] ( } } +final case class IpRangeAggregationBucketResult private[elasticsearch] ( + key: String, + from: Option[String], + to: Option[String], + docCount: Int +) extends AggregationResult + +final case class IpRangeAggregationResult private[elasticsearch] ( + buckets: Chunk[IpRangeAggregationBucketResult] +) extends 
AggregationResult
+
 final case class MaxAggregationResult private[elasticsearch] (value: Double) extends AggregationResult
 
 final case class MinAggregationResult private[elasticsearch] (value: Double) extends AggregationResult

From 34aa41395c9d2d920e6ca424cb292234c9f85e59 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Wed, 18 Jun 2025 14:41:32 +0200
Subject: [PATCH 02/15] Add optional sub-aggregations to
 IpRangeAggregationBucketResult.

---
 .../result/AggregationResult.scala            | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala b/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala
index e9bdb6222..1b24a0ffc 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/result/AggregationResult.scala
@@ -53,16 +53,25 @@ final case class FilterAggregationResult private[elasticsearch] (
   }
 }
 
+final case class IpRangeAggregationResult private[elasticsearch] (
+  buckets: Chunk[IpRangeAggregationBucketResult]
+) extends AggregationResult
+
 final case class IpRangeAggregationBucketResult private[elasticsearch] (
   key: String,
   from: Option[String],
   to: Option[String],
-  docCount: Int
-) extends AggregationResult
+  docCount: Int,
+  subAggregations: Map[String, AggregationResult]
+) extends AggregationResult {
 
-final case class IpRangeAggregationResult private[elasticsearch] (
-  buckets: Chunk[IpRangeAggregationBucketResult]
-) extends AggregationResult
+  def subAggregationAs[A <: AggregationResult](aggName: String): Either[DecodingException, Option[A]] =
+    subAggregations.get(aggName) match {
+      case Some(agg: A) => Right(Some(agg))
+      case Some(_)      => Left(DecodingException(s"Aggregation with name $aggName was not of the type you provided."))
+      case None         => Right(None)
+    }
+}
 
 final case class MaxAggregationResult private[elasticsearch] (value: Double) extends AggregationResult

From 17e75b17fbef08d2dfda10b8a9303fdfa1b04310 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Wed, 18 Jun 2025 14:56:30 +0200
Subject: [PATCH 03/15] Add asIpRangeAggregation in package and add case in
 SearchWithAggregationsResponse.
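
A usage sketch of the new accessor (illustrative only; the index value,
field name and ranges below are placeholders, not part of this change):

    Executor
      .execute(ElasticRequest.aggregate(index, ipRangeAggregation("ipRangeAgg", "ipField", ranges)))
      .asIpRangeAggregation("ipRangeAgg")

This yields a RIO[R, Option[IpRangeAggregationResult]], mirroring the other
asXAggregation accessors.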
---
 .../response/SearchWithAggregationsResponse.scala | 2 ++
 .../src/main/scala/zio/elasticsearch/package.scala | 12 ++++++++++++
 2 files changed, 14 insertions(+)

diff --git a/modules/library/src/main/scala/zio/elasticsearch/executor/response/SearchWithAggregationsResponse.scala b/modules/library/src/main/scala/zio/elasticsearch/executor/response/SearchWithAggregationsResponse.scala
index f02886732..4de59edae 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/executor/response/SearchWithAggregationsResponse.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/executor/response/SearchWithAggregationsResponse.scala
@@ -82,6 +82,8 @@ private[elasticsearch] final case class SearchWithAggregationsResponse(
             ExtendedStatsAggregationResponse.decoder.decodeJson(data.toString).map(field.split("#")(1) -> _)
           case str if str.contains("filter#") =>
             FilterAggregationResponse.decoder.decodeJson(data.toString).map(field.split("#")(1) -> _)
+          case str if str.contains("ip_range#") =>
+            IpRangeAggregationResponse.decoder.decodeJson(data.toString).map(field.split("#")(1) -> _)
           case str if str.contains("max#") =>
             MaxAggregationResponse.decoder.decodeJson(data.toString).map(field.split("#")(1) -> _)
           case str if str.contains("min#") =>
diff --git a/modules/library/src/main/scala/zio/elasticsearch/package.scala b/modules/library/src/main/scala/zio/elasticsearch/package.scala
index 8c7d1eefd..56ef49c3e 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/package.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/package.scala
@@ -96,6 +96,18 @@ package object elasticsearch extends IndexNameNewtype with IndexPatternNewtype w
     def asExtendedStatsAggregation(name: String): RIO[R, Option[ExtendedStatsAggregationResult]] =
       aggregationAs[ExtendedStatsAggregationResult](name)
 
+    /**
+     * Executes the [[ElasticRequest.SearchRequest]] or the [[ElasticRequest.SearchAndAggregateRequest]].
+     *
+     * @param name
+     *   the name of the aggregation to retrieve
+     * @return
+     *   a [[RIO]] effect that, when executed, will produce the aggregation as an instance of
+     *   [[result.IpRangeAggregationResult]].
+     */
+    def asIpRangeAggregation(name: String): RIO[R, Option[IpRangeAggregationResult]] =
+      aggregationAs[IpRangeAggregationResult](name)
+
     /**
      * Executes the [[ElasticRequest.SearchRequest]] or the [[ElasticRequest.SearchAndAggregateRequest]].
     *

From 217870ed9e727aecd5397ae1a53eeb21d0174bfd Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Wed, 18 Jun 2025 15:48:36 +0200
Subject: [PATCH 04/15] Add a first ip range aggregation test in
 HttpExecutorSpec

---
 .../zio/elasticsearch/HttpExecutorSpec.scala  | 41 ++++++++++++++++++-
 .../response/AggregationResponse.scala        | 13 ++++++
 2 files changed, 52 insertions(+), 2 deletions(-)

diff --git a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
index 24efb9e44..c07122d7d 100644
--- a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
+++ b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
@@ -21,7 +21,7 @@ import zio.elasticsearch.ElasticAggregation._
 import zio.elasticsearch.ElasticHighlight.highlight
 import zio.elasticsearch.ElasticQuery.{script => _, _}
 import zio.elasticsearch.ElasticSort.sortBy
-import zio.elasticsearch.aggregation.AggregationOrder
+import zio.elasticsearch.aggregation.{AggregationOrder, IpRange}
 import zio.elasticsearch.data.GeoPoint
 import zio.elasticsearch.domain.{PartialTestDocument, TestDocument, TestSubDocument}
 import zio.elasticsearch.executor.Executor
@@ -467,6 +467,43 @@ object HttpExecutorSpec extends IntegrationSpec {
           Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
           Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
         ),
+        test("aggregate using ip range aggregation") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docIdA, docA, docIdB, docB) =>
+            // the documents put IP values into stringField, so the aggregation targets that same field
+            val updatedA = docA.copy(stringField = "192.168.1.10")
+            val updatedB = docB.copy(stringField = "192.168.1.200")
+
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+
+              _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, updatedA))
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, docIdB, updatedB)
+                       .refreshTrue
+                   )
+
+              aggregation = IpRange(
+                              name = "ip_range_agg",
+                              field = "stringField",
+                              ranges = Chunk(
+                                IpRange.IpRangeBound(to = Some("192.168.1.100")),
+                                IpRange.IpRangeBound(
+                                  from = Some("192.168.1.100"),
+                                  to = Some("192.168.1.255")
+                                )
+                              ),
+                              keyed = None,
+                              subAggregations = Chunk.empty
+                            )
+
+              result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+              agg    <- result.aggregation("ip_range_agg")
+            } yield assertTrue(agg.nonEmpty)
+          }
+        } @@ around(
+          Executor.execute(
+            ElasticRequest.createIndex(
+              firstSearchIndex,
+              """{ "mappings": { "properties": { "stringField": { "type": "ip" } } } }"""
+            )
+          ),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
         test("aggregate using terms aggregation with max aggregation as a sub aggregation") {
           checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
             (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
@@ -1064,7 +1101,7 @@ object HttpExecutorSpec extends IntegrationSpec {
           _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue)
           query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0))
           res  <- Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, query)).documentAs[TestDocument]
-        } yield (assert(res)(equalTo(Chunk(firstDocumentUpdated, thirdDocumentUpdated))))
+        } yield assert(res)(equalTo(Chunk(firstDocumentUpdated, thirdDocumentUpdated)))
       }
     } @@ around(
       Executor.execute(
diff --git a/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala b/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala
index d31b2758a..48d89316d 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala
@@ -77,6 +77,19 @@ object AggregationResponse {
             (key, toResult(response))
           })
         )
+      case IpRangeAggregationResponse(buckets) =>
+        IpRangeAggregationResult(
+          buckets.map(b =>
+            IpRangeAggregationBucketResult(
+              key = b.key,
+              from = b.from,
+              to = b.to,
+              docCount = b.docCount,
+              subAggregations = Map.empty
+            )
+          )
+        )
+
       case MaxAggregationResponse(value) =>
         MaxAggregationResult(value)
       case MinAggregationResponse(value) =>

From a008b525a484469b6f35271f23d197c4905e2a87 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Wed, 18 Jun 2025 16:00:54 +0200
Subject: [PATCH 05/15] Add descriptions in ElasticAggregation

---
 .../elasticsearch/ElasticAggregation.scala    | 29 +++++++++++++++++++
 .../response/AggregationResponse.scala        |  1 -
 2 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
index 949d7abe5..e7da783a8 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
@@ -160,6 +160,29 @@ object ElasticAggregation {
   final def filterAggregation(name: String, query: ElasticQuery[_]): FilterAggregation =
     Filter(name = name, query = query, subAggregations = Chunk.empty)
 
+  /**
+   * Constructs a type-safe instance of [[zio.elasticsearch.aggregation.IpRangeAggregation]] using the specified
+   * parameters. Sub-aggregations can be attached afterwards using `withSubAgg`.
+   *
+   * @param name
+   *   aggregation name
+   * @param field
+   *   the type-safe field for which the ip range aggregation will be executed
+   * @param ranges
+   *   a chunk of IP range bounds specifying the ranges
+   * @return
+   *   an instance of [[zio.elasticsearch.aggregation.IpRangeAggregation]] that represents ip range aggregation to be
+   *   performed.
+   */
   def ipRangeAggregation(
     name: String,
     field: Field[_, String],
     ranges: Chunk[IpRange.IpRangeBound]
   ): IpRangeAggregation =
     IpRange(name = name, field = field.toString, ranges = ranges, keyed = None, subAggregations = Chunk.empty)
 
+  /**
+   * Constructs an instance of [[zio.elasticsearch.aggregation.IpRangeAggregation]] using the specified parameters.
+   * Sub-aggregations can be attached afterwards using `withSubAgg`.
+   *
+   * @param name
+   *   aggregation name
+   * @param field
+   *   the field (as a string) for which the ip range aggregation will be executed
+   * @param ranges
+   *   a chunk of IP range bounds specifying the ranges
+   * @return
+   *   an instance of [[zio.elasticsearch.aggregation.IpRangeAggregation]] that represents ip range aggregation to be
+   *   performed.
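+   *
+   * For example (an illustrative sketch; the field name and range values here
+   * are placeholders, not part of the library):
+   * {{{
+   *   ipRangeAggregation(
+   *     name = "ipRangeAgg",
+   *     field = "ipField",
+   *     ranges = Chunk(
+   *       IpRange.IpRangeBound(to = Some("10.0.0.128")),
+   *       IpRange.IpRangeBound(from = Some("10.0.0.128"), to = Some("10.0.0.255"))
+   *     )
+   *   )
+   * }}}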
+   */
   def ipRangeAggregation(name: String, field: String, ranges: Chunk[IpRange.IpRangeBound]): IpRangeAggregation =
     IpRange(name = name, field = field, ranges = ranges, keyed = None, subAggregations = Chunk.empty)
 
diff --git a/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala b/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala
index 48d89316d..6815fdcc3 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/executor/response/AggregationResponse.scala
@@ -89,7 +89,6 @@ object AggregationResponse {
             )
           )
         )
-
       case MaxAggregationResponse(value) =>
         MaxAggregationResult(value)
       case MinAggregationResponse(value) =>

From 6f7fc4767934549c58a82946e371348107365146 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Wed, 18 Jun 2025 17:15:31 +0200
Subject: [PATCH 06/15] Change subAggregations into an Option parameter and
 add one more test.

---
 .../zio/elasticsearch/HttpExecutorSpec.scala  | 5840 +++++-----
 .../zio/elasticsearch/IntegrationSpec.scala   |  226 +-
 .../elasticsearch/ElasticAggregation.scala    |   14 +-
 .../aggregation/Aggregations.scala            |   15 +-
 .../zio/elasticsearch/ElasticQuerySpec.scala  | 9356 ++++++++---------
 .../zio/elasticsearch/FieldDSLSpec.scala      |  174 +-
 .../HttpElasticExecutorSpec.scala             |  596 +-
 .../zio/elasticsearch/IndexNameSpec.scala     |  192 +-
 8 files changed, 8225 insertions(+), 8188 deletions(-)

diff --git a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
index c07122d7d..b62c25206 100644
--- a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
+++ b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
@@ -1,2903 +1,2937 @@
/*
 * Copyright 2022 LambdaWorks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
- */ - -package zio.elasticsearch - -import zio.Chunk -import zio.elasticsearch.ElasticAggregation._ -import zio.elasticsearch.ElasticHighlight.highlight -import zio.elasticsearch.ElasticQuery.{script => _, _} -import zio.elasticsearch.ElasticSort.sortBy -import zio.elasticsearch.aggregation.{AggregationOrder, IpRange} -import zio.elasticsearch.data.GeoPoint -import zio.elasticsearch.domain.{PartialTestDocument, TestDocument, TestSubDocument} -import zio.elasticsearch.executor.Executor -import zio.elasticsearch.query.DistanceUnit.Kilometers -import zio.elasticsearch.query.FunctionScoreFunction.randomScoreFunction -import zio.elasticsearch.query.MultiMatchType._ -import zio.elasticsearch.query.sort.SortMode.Max -import zio.elasticsearch.query.sort.SortOrder._ -import zio.elasticsearch.query.sort.SourceType.NumberType -import zio.elasticsearch.query.{Distance, FunctionScoreBoostMode, FunctionScoreFunction, InnerHits} -import zio.elasticsearch.request.{CreationOutcome, DeletionOutcome} -import zio.elasticsearch.result.{FilterAggregationResult, Item, MaxAggregationResult, UpdateByQueryResult} -import zio.elasticsearch.script.{Painless, Script} -import zio.json.ast.Json.{Arr, Str} -import zio.schema.codec.JsonCodec -import zio.stream.{Sink, ZSink} -import zio.test.Assertion._ -import zio.test.TestAspect._ -import zio.test._ - -import java.time.LocalDate -import scala.util.Random - -object HttpExecutorSpec extends IntegrationSpec { - - def spec: Spec[TestEnvironment, Any] = { - suite("Executor")( - suite("HTTP Executor")( - suite("aggregation")( - test("aggregate using avg aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(doubleField = 20)) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(doubleField = 10)) - .refreshTrue - ) - aggregation = avgAggregation(name = "aggregationDouble", field = TestDocument.doubleField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asAvgAggregation("aggregationDouble") - } yield assert(aggsRes.head.value)(equalTo(15.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using cardinality aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 10)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 20)) - .refreshTrue - ) - aggregation = cardinalityAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - - cardinalityAgg <- aggsRes.asCardinalityAggregation("aggregationInt") - } yield 
assert(cardinalityAgg.map(_.value))(isSome(equalTo(2))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using extended stats aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 100)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 50)) - .refreshTrue - ) - aggregation = extendedStatsAggregation(name = "aggregation", field = TestDocument.intField).sigma(3) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asExtendedStatsAggregation("aggregation") - } yield assert(aggsRes.head.count)(equalTo(2)) && - assert(aggsRes.head.min)(equalTo(50.0)) && - assert(aggsRes.head.max)(equalTo(100.0)) && - assert(aggsRes.head.avg)(equalTo(75.0)) && - assert(aggsRes.head.sum)(equalTo(150.0)) && - assert(aggsRes.head.sumOfSquares)(equalTo(12500.0)) && - assert(aggsRes.head.variance)(equalTo(625.0)) && - assert(aggsRes.head.variancePopulation)(equalTo(625.0)) && - assert(aggsRes.head.varianceSampling)(equalTo(1250.0)) && - assert(aggsRes.head.stdDeviation)(equalTo(25.0)) && - assert(aggsRes.head.stdDeviationPopulation)(equalTo(25.0)) && - assert(aggsRes.head.stdDeviationSampling)(equalTo(35.35533905932738)) && - assert(aggsRes.head.stdDeviationBoundsResult.upper)(equalTo(150.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.lower)(equalTo(0.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.upperPopulation)(equalTo(150.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.lowerPopulation)(equalTo(0.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.upperSampling)(equalTo(181.06601717798213)) && - assert(aggsRes.head.stdDeviationBoundsResult.lowerSampling)(equalTo(-31.066017177982133)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using filter aggregation with max aggregation as a sub aggregation") { - val expectedResult = ( - "aggregation", - FilterAggregationResult( - docCount = 2, - subAggregations = Map( - "subAggregation" -> MaxAggregationResult(value = 5.0) - ) - ) - ) - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(stringField = "test", intField = 7) - secondDocumentUpdated = - secondDocument.copy(stringField = "filterAggregation", intField = 3) - thirdDocumentUpdated = - thirdDocument.copy(stringField = "filterAggregation", intField = 5) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument]( - firstSearchIndex, - firstDocumentId, - firstDocumentUpdated - ) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocumentUpdated - ) - ) - _ <- Executor.execute( - ElasticRequest - 
.upsert[TestDocument]( - firstSearchIndex, - thirdDocumentId, - thirdDocumentUpdated - ) - .refreshTrue - ) - query = term(field = TestDocument.stringField, value = secondDocumentUpdated.stringField.toLowerCase) - aggregation = - filterAggregation(name = "aggregation", query = query).withSubAgg( - maxAggregation("subAggregation", TestDocument.intField) - ) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - - } yield assert(aggsRes.head)(equalTo(expectedResult)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using max aggregation") { - val expectedResponse = ("aggregationInt", MaxAggregationResult(value = 20.0)) - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 20)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 10)) - .refreshTrue - ) - aggregation = maxAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - } yield assert(aggsRes.head)(equalTo(expectedResponse)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using min aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23)) - .refreshTrue - ) - aggregation = minAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asMinAggregation("aggregationInt") - } yield assert(aggsRes.head.value)(equalTo(23.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using percentile ranks aggregation") { - val expectedResult = Map("500.0" -> 55.55555555555555, "600.0" -> 100.0) - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 400)) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, 
secondDocument.copy(intField = 500)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 550)) - .refreshTrue - ) - aggregation = - percentileRanksAggregation(name = "aggregation", field = "intField", value = 500.0, values = 600.0) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asPercentileRanksAggregation("aggregation") - } yield assert(aggsRes.head.values)(equalTo(expectedResult)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asPercentilesAggregation("aggregationInt") - } yield assert(aggsRes.head.values.size)(equalTo(3)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation with multi index") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) - aggsRes <- Executor - .execute( - ElasticRequest.aggregate( - selectors = MultiIndex.names(firstSearchIndex, secondSearchIndex), - aggregation = aggregation - ) - ) - .asPercentilesAggregation("aggregationInt") - } yield assert(aggsRes.head.values.size)(equalTo(3)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation with index pattern") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - 
.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) - aggsRes <- Executor - .execute( - ElasticRequest.aggregate( - selectors = IndexPatternAll, - aggregation = aggregation - ) - ) - .asPercentilesAggregation("aggregationInt") - } yield assert(aggsRes.head.values.size)(equalTo(3)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation as sub aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - termsAggregation(name = "first", field = TestDocument.stringField.keyword) - .withSubAgg(percentilesAggregation(name = "second", field = TestSubDocument.intField)) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using stats aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 7)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 6)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 10)) - .refreshTrue - ) - aggregation = statsAggregation(name = "aggregation", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asStatsAggregation("aggregation") - } yield assert(aggsRes.head.count)(equalTo(3)) && - assert(aggsRes.head.min)(equalTo(6.0)) && - assert(aggsRes.head.max)(equalTo(10.0)) && - assert(aggsRes.head.avg)(equalTo(7.666666666666667)) && - assert(aggsRes.head.sum)(equalTo(23.0)) - - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using sum aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, 
            secondDocumentId, secondDocument) =>
              for {
                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                _ <- Executor.execute(
                       ElasticRequest
                         .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200))
                     )
                _ <- Executor.execute(
                       ElasticRequest
                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23))
                         .refreshTrue
                     )
                aggregation = sumAggregation(name = "aggregationInt", field = TestDocument.intField)
                aggsRes <-
                  Executor
                    .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
                    .asSumAggregation("aggregationInt")
              } yield assert(aggsRes.head.value)(equalTo(223.0))
          }
        } @@ around(
          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
        ),
        test("aggregate using ip range aggregation") {
          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docIdA, docA, docIdB, docB) =>
            // the documents put IP values into stringField, so the aggregation targets that same field
            val updatedA = docA.copy(stringField = "192.168.1.10")
            val updatedB = docB.copy(stringField = "192.168.1.200")

            for {
              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))

              _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, updatedA))
              _ <- Executor.execute(
                     ElasticRequest
                       .upsert[TestDocument](firstSearchIndex, docIdB, updatedB)
                       .refreshTrue
                   )

              aggregation = IpRange(
                              name = "ip_range_agg",
                              field = "stringField",
                              ranges = Chunk(
                                IpRange.IpRangeBound(to = Some("192.168.1.100")),
                                IpRange.IpRangeBound(
                                  from = Some("192.168.1.100"),
                                  to = Some("192.168.1.255")
                                )
                              ),
                              keyed = None,
                              subAggregations = Chunk.empty
                            )

              result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
              agg    <- result.aggregation("ip_range_agg")
            } yield assertTrue(agg.nonEmpty)
          }
        } @@ around(
          Executor.execute(
            ElasticRequest.createIndex(
              firstSearchIndex,
              """{ "mappings": { "properties": { "stringField": { "type": "ip" } } } }"""
            )
          ),
          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
        ),
        test("aggregate using terms aggregation with max aggregation as a sub aggregation") {
          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
              for {
                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                _ <- Executor.execute(
                       ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
                     )
                _ <- Executor.execute(
                       ElasticRequest
                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
                         .refreshTrue
                     )
                aggregation =
                  termsAggregation(name = "aggregationString", field = TestDocument.stringField.keyword).withSubAgg(
                    maxAggregation("subAggregation", TestDocument.intField)
                  )
                aggsRes <-
                  Executor
                    .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
                    .aggregations
              } yield assert(aggsRes)(isNonEmpty)
          }
        } @@ around(
          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
        ),
        test("aggregate using missing aggregations") {
          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
              for {
                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
                _ <- Executor.execute(
                       ElasticRequest.upsert[TestDocument](firstSearchIndex,
firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = multipleAggregations.aggregations( - missingAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ), - missingAggregation(name = "aggregationString", field = "stringField.keyword") - ) - aggsRes <- Executor - .execute( - ElasticRequest - .aggregate(selectors = firstSearchIndex, aggregation = aggregation) - ) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using multiple terms aggregations") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = multipleAggregations.aggregations( - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ), - termsAggregation(name = "aggregationInt", field = "intField.keyword") - ) - aggsRes <- Executor - .execute( - ElasticRequest - .aggregate(selectors = firstSearchIndex, aggregation = aggregation) - ) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using terms aggregation with nested max aggregation and bucket sort aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 2)) - .refreshTrue - ) - aggregation = - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).orderBy(AggregationOrder("aggregationInt", Desc)) - .withSubAgg(maxAggregation(name = "aggregationInt", field = "intField")) - .withSubAgg( - bucketSortAggregation("aggregationBucket").sort( - ElasticSort.sortBy("aggregationInt").order(Desc) - ) - ) - .size(1) - aggsRes <- Executor - .execute( - ElasticRequest - .aggregate(selectors = firstSearchIndex, aggregation = aggregation) - ) - agg <- aggsRes.asTermsAggregation("aggregationString") - } yield assert(agg.map(_.buckets.size))(isSome(equalTo(1))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using value count aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - firstDocumentId, - firstDocument.copy(stringField = "test") - ) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocument.copy(stringField = "test") - ) - .refreshTrue - ) - aggregation = valueCountAggregation(name = "aggregation", field = TestDocument.stringField.keyword) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asValueCountAggregation("aggregation") - - } yield assert(aggsRes.head.value)(equalTo(2)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using weighted avg aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - firstDocumentId, - firstDocument.copy(doubleField = 5, intField = 2) - ) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocument.copy(doubleField = 10, intField = 3) - ) - .refreshTrue - ) - aggregation = weightedAvgAggregation( - name = "weightedAggregation", - valueField = TestDocument.doubleField, - weightField = TestDocument.intField - ) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asWeightedAvgAggregation("weightedAggregation") - } yield assert(aggsRes.head.value)(equalTo(8.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ), - suite("search with aggregation")( - test("search for first result using match all query with multiple terms aggregations") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchAll - aggregation = termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withAgg(termsAggregation("aggregationInt", "intField")) - res <- Executor.execute( - ElasticRequest - .search( - selectors = firstSearchIndex, - query = query, - aggregation = aggregation - ) - .from(0) - .size(1) - ) - docs <- res.documentAs[TestDocument] - aggs <- res.aggregations - } yield assert(docs.length)(equalTo(1)) && assert(aggs)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test( - "search for first result using match all query with multiple terms aggregations and search after parameter" - ) { - checkOnce(genTestDocument) { firstDocument => - for { - _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - reqs = (0 to 20).map { i => - ElasticRequest.create[TestDocument]( - firstSearchIndex, - firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = matchAll - aggregation = termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withAgg(termsAggregation("aggregationInt", "intField")) - res <- Executor - .execute( - ElasticRequest - .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - sa <- res.lastSortValue - res2 <- Executor - .execute( - ElasticRequest - .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) - .searchAfter(sa.get) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - docs <- res2.documentAs[TestDocument] - aggs <- res2.aggregations - } yield assert(docs.length)(equalTo(10)) && assert(aggs)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search using match all query with multiple terms aggregations with descending sort on one field") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - val firstDocumentWithFixedIntField = firstDocument.copy(intField = 25) - val secondDocumentWithFixedIntField = secondDocument.copy(intField = 32) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentWithFixedIntField) - .refreshTrue - ) - query = matchAll - aggregation = - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withAgg(termsAggregation("aggregationInt", "intField.keyword")) - res <- Executor.execute( - ElasticRequest - .search( - selectors = firstSearchIndex, - query = query, - aggregation = aggregation - ) - .sort(sortBy(field = TestDocument.intField).order(Desc)) - ) - docs <- res.documentAs[TestDocument] - aggs <- res.aggregations - } yield assert(docs)(equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField))) && - assert(aggs)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test( - "search using match all query with terms aggregations, nested max aggregation and nested bucketSelector aggregation" - ) { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 100)) - .refreshTrue - ) - query = matchAll - aggregation = - termsAggregation( - name = 
"aggregationString", - field = TestDocument.stringField.keyword - ).withSubAgg(maxAggregation(name = "aggregationInt", field = TestDocument.intField)) - .withSubAgg( - bucketSelectorAggregation( - name = "aggregationSelector", - script = Script("params.aggregation_int > 10"), - bucketsPath = Map("aggregation_int" -> "aggregationInt") - ) - ) - res <- Executor.execute( - ElasticRequest - .search( - selectors = firstSearchIndex, - query = query, - aggregation = aggregation - ) - ) - docs <- res.documentAs[TestDocument] - termsAgg <- res.asTermsAggregation("aggregationString") - } yield assert(docs)(isNonEmpty) && assert( - termsAgg.map(_.buckets.size) - )(isSome(equalTo(1))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("counting documents")( - test("successfully count documents with given query") { - checkOnce(genTestDocument) { document => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstCountIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](firstCountIndex, document).refreshTrue - ) - res <- Executor.execute(ElasticRequest.count(firstCountIndex, matchAll)) - } yield assert(res)(equalTo(1)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstCountIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstCountIndex)).orDie - ), - test("successfully count documents without given query") { - checkOnce(genTestDocument) { document => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondCountIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](secondCountIndex, document).refreshTrue - ) - res <- Executor.execute(ElasticRequest.count(secondCountIndex)) - } yield assert(res)(equalTo(1)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondCountIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondCountIndex)).orDie - ) - ) @@ shrinks(0), - suite("creating document")( - test("successfully create document") { - checkOnce(genTestDocument) { document => - for { - docId <- Executor.execute(ElasticRequest.create[TestDocument](index, document)) - res <- Executor.execute(ElasticRequest.getById(index, docId)).documentAs[TestDocument] - } yield assert(res)(isSome(equalTo(document))) - } - }, - test("successfully create document with ID given") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - assertZIO(Executor.execute(ElasticRequest.create[TestDocument](index, documentId, document)))( - equalTo(CreationOutcome.Created) - ) - } - }, - test("return 'AlreadyExists' if document with given ID already exists") { - checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) - res <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, secondDocument)) - } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) - } - } - ), - suite("creating index")( - test("successfully create index") { - assertZIO(Executor.execute(ElasticRequest.createIndex(createIndexTestName)))( - equalTo(CreationOutcome.Created) - ) - }, - test("return 'AlreadyExists' if index already exists") { - for { - _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) - res <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) - } yield 
assert(res)(equalTo(CreationOutcome.AlreadyExists)) - } - ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), - suite("creating or updating document")( - test("successfully create document") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(document))) - } - }, - test("successfully update document") { - checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, firstDocument)) - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, secondDocument)) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(secondDocument))) - } - } - ), - suite("deleting document by ID")( - test("successfully delete existing document") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.deleteById(index, documentId)) - } yield assert(res)(equalTo(DeletionOutcome.Deleted)) - } - }, - test("return 'NotFound' if the document does not exist") { - checkOnce(genDocumentId) { documentId => - assertZIO(Executor.execute(ElasticRequest.deleteById(index, documentId)))( - equalTo(DeletionOutcome.NotFound) - ) - } - } - ), - suite("delete index")( - test("successfully delete existing index") { - checkOnce(genIndexName) { name => - for { - _ <- Executor.execute(ElasticRequest.createIndex(name)) - res <- Executor.execute(ElasticRequest.deleteIndex(name)) - } yield assert(res)(equalTo(DeletionOutcome.Deleted)) - } - }, - test("return 'NotFound' if index does not exist") { - checkOnce(genIndexName) { name => - assertZIO(Executor.execute(ElasticRequest.deleteIndex(name)))(equalTo(DeletionOutcome.NotFound)) - } - } - ), - suite("finding document")( - test("return true if the document exists") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.exists(index, documentId)) - } yield assert(res)(isTrue) - } - }, - test("return false if the document does not exist") { - checkOnce(genDocumentId) { documentId => - assertZIO(Executor.execute(ElasticRequest.exists(index, documentId)))(isFalse) - } - } - ), - suite("retrieving document by ID")( - test("successfully return document") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(res)(isSome(equalTo(document))) - } - }, - test("return None if the document does not exist") { - checkOnce(genDocumentId) { documentId => - assertZIO(Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument])(isNone) - } - }, - test("fail with throwable if decoding fails") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val result = for { - _ <- 
Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestSubDocument] - } yield res - - assertZIO(result.exit)( - fails(isSubtype[Exception](assertException("Could not parse the document: .nestedField(missing)"))) - ) - } - } - ), - suite("refresh index")( - test("successfully refresh existing index") { - assertZIO(Executor.execute(ElasticRequest.refresh(index)))(isTrue) - }, - test("successfully refresh multiple existing indices") { - for { - _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) - res <- Executor.execute(ElasticRequest.refresh(MultiIndex.names(index, createIndexTestName))) - } yield assert(res)(isTrue) - }, - test("successfully refresh all indices") { - assertZIO(Executor.execute(ElasticRequest.refresh(IndexPatternAll)))(isTrue) - }, - test("return false if index does not exist") { - assertZIO(Executor.execute(ElasticRequest.refresh(refreshFailIndex)))(isFalse) - } - ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), - suite("retrieving document by IDs")( - test("find documents by ids") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ids(firstDocumentId.toString, secondDocumentId.toString) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query) - ) - items <- res.items - } yield assert(items != null)(isTrue) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ), - suite("kNN search")( - test("search for top two results") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(vectorField = List(1, 5, -20)) - secondDocumentUpdated = secondDocument.copy(vectorField = List(42, 8, -15)) - thirdDocumentUpdated = thirdDocument.copy(vectorField = List(15, 11, 23)) - req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated) - req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated) - req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) - _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue) - query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0)) - res <- Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(equalTo(Chunk(firstDocumentUpdated, thirdDocumentUpdated))) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }""" - ) - ), - 
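// Sketch: the kNN tests rely on the `dense_vector` mapping created in this `around`; with
// `l2_norm` similarity the two vectors nearest to the query vector in Euclidean distance
// are returned. Argument order is assumed to be (field, k, numCandidates, queryVector),
// mirroring the test above:
//
//   val knnQuery = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0))
//   Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, knnQuery)).documentAs[TestDocument]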
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for top two results with filters") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(intField = 15, vectorField = List(1, 5, -20)) - secondDocumentUpdated = secondDocument.copy(intField = 21, vectorField = List(42, 8, -15)) - thirdDocumentUpdated = thirdDocument.copy(intField = 4, vectorField = List(15, 11, 23)) - req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated) - req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated) - req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) - _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue) - query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0)) - filter = ElasticQuery.range(TestDocument.intField).gt(10) - res <- Executor - .execute(ElasticRequest.knnSearch(firstSearchIndex, query).filter(filter)) - .documentAs[TestDocument] - } yield (assert(res)(equalTo(Chunk(firstDocumentUpdated, secondDocumentUpdated)))) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents")( - test("search for a document using a boosting query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"this is a ${firstDocument.stringField} test", intField = 7) - secondDocumentUpdated = - secondDocument.copy( - stringField = s"this is another ${secondDocument.stringField} test", - intField = 5 - ) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - .refreshTrue - ) - query = boosting( - negativeBoost = 0.1f, - negativeQuery = - term(field = TestDocument.stringField, value = firstDocument.stringField.toLowerCase), - positiveQuery = matchPhrase( - field = TestDocument.stringField, - value = "test" - ) - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated)))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a constant score query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = "this is a test") - _ <- - 
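// Sketch: `constantScore` disables relevance scoring for the wrapped query and assigns every
// match the fixed `boost`, which the surrounding test exercises. Assumed shape:
//
//   constantScore(matchPhrase(field = TestDocument.stringField, value = "test")).boost(2.1)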
Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = constantScore( - matchPhrase( - field = TestDocument.stringField, - value = "test" - ) - ).boost(2.1) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for first 2 documents using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query).from(0).size(2)) - .documentAs[TestDocument] - } yield assert(res.length)(equalTo(2)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for first 2 documents using range query with date format") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - val firstDocumentUpdated = firstDocument.copy(dateField = LocalDate.now.minusDays(2)) - val secondDocumentUpdated = secondDocument.copy(dateField = LocalDate.now) - val thirdDocumentUpdated = thirdDocument.copy(dateField = LocalDate.now.plusDays(2)) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) - .refreshTrue - ) - query = range(TestDocument.dateField).gte(LocalDate.now).format("yyyy-MM-dd").boost(1.0) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, thirdDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for documents with source filtering") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - 
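// Note: source filtering trims `_source` server-side, so excluded fields are gone before
// decoding; that is why decoding an excluded field fails later in this suite. Shapes as
// used below:
//
//   ElasticRequest.search(firstSearchIndex, query).includes[PartialTestDocument]
//   ElasticRequest.search(firstSearchIndex, query).excludes("intField")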
for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query).includes[PartialTestDocument]) - items <- res.items - } yield assert(items.map(item => Right(item.raw)))( - hasSameElements( - List(firstDocument, secondDocument, thirdDocument).map(document => - TestDocument.schema.migrate(PartialTestDocument.schema).flatMap(_(document)).flatMap { - partialDocument => - JsonCodec.jsonEncoder(PartialTestDocument.schema).toJsonAST(partialDocument) - } - ) - ) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("fail if an excluded source field is attempted to be decoded") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - val result = - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - _ <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query).excludes("intField")) - .documentAs[TestDocument] - } yield () - - assertZIO(result.exit)( - fails( - isSubtype[Exception]( - assertException("Could not parse all documents successfully: .intField(missing)") - ) - ) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("fail if any of results cannot be decoded") { - checkOnce(genDocumentId, genDocumentId, genTestDocument, genTestSubDocument) { - (documentId, subDocumentId, document, subDocument) => - val result = - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](secondSearchIndex, documentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestSubDocument](secondSearchIndex, subDocumentId, subDocument) - .refreshTrue - ) - query = range(TestDocument.intField).gte(0) - res <- Executor.execute(ElasticRequest.search(secondSearchIndex, query)).documentAs[TestDocument] - } yield res - - assertZIO(result.exit)( - fails( - isSubtype[Exception]( - assertException("Could not parse all documents successfully: .dateField(missing)") - ) - ) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for a document which contains a specific prefix using a prefix query") { - checkOnce(genDocumentId, 
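// Sketch: `prefix` matches keyword values that start with the given string, without the
// pattern machinery of `wildcard`/`regexp`. The value here is hypothetical:
//
//   ElasticQuery.prefix(field = TestDocument.stringField.keyword, value = "abc")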
genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.prefix( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.take(3) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a disjunction max query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"This is a ${firstDocument.stringField} test.") - secondDocumentUpdated = - secondDocument.copy(stringField = - s"This is a ${secondDocument.stringField} test. It should be in the list before ${firstDocument.stringField}, because it has higher relevance score than ${firstDocument.stringField}" - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - .refreshTrue - ) - query = disjunctionMax( - term( - field = TestDocument.stringField, - value = firstDocument.stringField.toLowerCase - ), - matchPhrase( - field = TestDocument.stringField, - value = secondDocument.stringField - ) - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a fuzzy query") { - checkOnce(genDocumentId, genTestDocument) { (firstDocumentId, firstDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument).refreshTrue - ) - query = ElasticQuery.fuzzy( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.substring(1) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield { - assert(res)(Assertion.contains(firstDocument)) - } - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document which contains a term using a wildcard query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- 
Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.contains( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.take(3) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document which starts with a term using a wildcard query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.startsWith( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.take(3) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document which conforms to a pattern using a wildcard query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = wildcard( - field = TestDocument.stringField.keyword, - value = s"${firstDocument.stringField.take(2)}*${firstDocument.stringField.takeRight(2)}" - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a match all query with index pattern") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - firstDocumentCopy = firstDocument.copy(stringField = "this is test") - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) - .refreshTrue - ) - secondDocumentCopy = secondDocument.copy(stringField = "this is test") - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, 
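// Note: a search target may be a glob-style pattern or an explicit index list, as the
// surrounding tests demonstrate:
//
//   ElasticRequest.search(IndexPattern("search-index*"), matchAll)
//   ElasticRequest.search(MultiIndex.names(firstSearchIndex, secondSearchIndex), matchAll)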
secondDocumentId, secondDocumentCopy) - .refreshTrue - ) - query = matchAll - res <- Executor - .execute(ElasticRequest.search(IndexPattern("search-index*"), query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( - Assertion.contains(secondDocumentCopy) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for a document using a match boolean prefix query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = "test this is boolean") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchBooleanPrefix(TestDocument.stringField, "this is test bo") - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a match phrase query with multi index") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - firstDocumentCopy = firstDocument.copy(stringField = "this is test") - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) - .refreshTrue - ) - secondDocumentCopy = secondDocument.copy(stringField = "this is test") - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) - .refreshTrue - ) - query = matchPhrase( - field = TestDocument.stringField, - value = firstDocumentCopy.stringField - ) - - res <- Executor - .execute(ElasticRequest.search(MultiIndex.names(firstSearchIndex, secondSearchIndex), query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( - Assertion.contains(secondDocumentCopy) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for a document using a match phrase query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = 
s"this is ${firstDocument.stringField} test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchPhrase( - field = TestDocument.stringField, - value = firstDocument.stringField - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(Assertion.contains(document)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a match phrase prefix query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = s"${firstDocument.stringField} test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchPhrasePrefix( - field = TestDocument.stringField, - value = s"${firstDocument.stringField} te" - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a multi match query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = "test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - - query = - multiMatch(value = "test").fields(TestDocument.stringField).matchingType(BestFields) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a terms query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") - secondDocumentUpdated = - secondDocument.copy(stringField = s"this is ${secondDocument.stringField} another test") - _ <- - Executor.execute( - ElasticRequest - .bulk( - 
ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - ) - .refreshTrue - ) - query = terms( - field = TestDocument.stringField, - values = firstDocument.stringField.toLowerCase, - secondDocument.stringField.toLowerCase - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(List(firstDocumentUpdated, secondDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a terms set query with minimumShouldMatchField") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) - secondDocumentUpdated = - secondDocument.copy( - stringField = - s"this is ${secondDocument.stringField} another test, not ${firstDocument.stringField}", - intField = 2 - ) - _ <- - Executor.execute( - ElasticRequest - .bulk( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - ) - .refreshTrue - ) - query = termsSet( - field = "stringField", - minimumShouldMatchField = "intField", - terms = secondDocument.stringField.toLowerCase, - firstDocument.stringField.toLowerCase - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a terms set query with minimumShouldMatchScript") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) - secondDocumentUpdated = - secondDocument.copy( - stringField = s"this is ${secondDocument.stringField} test, not ${firstDocument.stringField}", - intField = 2 - ) - _ <- - Executor.execute( - ElasticRequest - .bulk( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - ) - .refreshTrue - ) - query = termsSetScript( - field = TestDocument.stringField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = firstDocument.stringField.toLowerCase, - secondDocument.stringField.toLowerCase - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) - } - } @@ around( - 
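// Note: `termsSet` lets each document declare how many of the supplied terms must match,
// via either a numeric field or a script, as the two tests above show (t1/t2 are
// placeholders):
//
//   termsSet(field = "stringField", minimumShouldMatchField = "intField", terms = t1, t2)
//   termsSetScript(field = TestDocument.stringField, minimumShouldMatchScript = Script("doc['intField'].value"), terms = t1, t2)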
Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using nested query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = - nested(path = TestDocument.subDocumentList, query = matchAll) - res <- - Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(List(firstDocument, secondDocument))) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using should with satisfying minimumShouldMatch condition") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should( - matches(TestDocument.stringField, firstDocument.stringField), - matches(TestDocument.intField, firstDocument.intField), - matches(TestDocument.doubleField, firstDocument.doubleField + 1) - ).minimumShouldMatch(2) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using script query") { - checkN(4)(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.script(Script("doc['booleanField'].value == true")) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(List(firstDocument, secondDocument).filter(_.booleanField == true))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document that doesn't exist using regexp query without case insensitivity") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, 
secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = - ElasticQuery.regexp( - field = TestDocument.stringField, - value = - s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}".toUpperCase - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(!Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using regexp query with case insensitivity") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery - .regexp( - field = TestDocument.stringField, - value = s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}" - ) - .caseInsensitiveTrue - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(firstDocument)) && assert(res)( - !Assertion.contains(secondDocument) - )) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using should with unsatisfying minimumShouldMatch condition") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should( - matches(TestDocument.stringField, firstDocument.stringField), - matches(TestDocument.intField, firstDocument.intField + 1), - matches(TestDocument.doubleField, firstDocument.doubleField + 1) - ).minimumShouldMatch(2) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(isEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents with inner hits")( - test("search for a document using nested query with inner hits") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - 
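// Note: `.innerHits` asks Elasticsearch to return the matching nested objects alongside
// each hit, so they can be decoded per hit instead of from `_source`:
//
//   nested(path = TestDocument.subDocumentList, query = matchAll).innerHits
//   items.map(_.innerHitAs[TestSubDocument]("subDocumentList"))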
ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = - nested(path = TestDocument.subDocumentList, query = matchAll).innerHits - result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) - items <- result.items - res = - items.map(_.innerHitAs[TestSubDocument]("subDocumentList")).collect { case Right(value) => value } - } yield assert(res)( - hasSameElements(List(firstDocument.subDocumentList, secondDocument.subDocumentList)) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents with highlights")( - test("successfully find document with highlight") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) - ) - items <- res.items - } yield assert(items.map(_.highlight("stringField")))( - hasSameElements(List(Some(Chunk(s"${firstDocument.stringField}")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find inner hit document with highlight") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = nested( - path = TestDocument.subDocumentList, - query = must( - matches( - TestSubDocument.stringField, - secondDocument.subDocumentList.headOption.map(_.stringField).getOrElse("foo") - ) - ) - ).innerHits( - InnerHits().highlights(highlight(TestSubDocument.stringField)) - ) - result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) - items <- result.items - res = items - .flatMap(_.innerHit("subDocumentList")) - .flatten - .flatMap(_.highlight("subDocumentList.stringField")) - .flatten - } yield assert(res)( - Assertion.contains( - secondDocument.subDocumentList.headOption - .map(doc => s"${doc.stringField}") - .getOrElse("foo") - ) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with 
highlight using field accessor") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query).highlights(highlight(TestDocument.stringField)) - ) - items <- res.items - } yield assert(items.map(_.highlight(TestDocument.stringField)))( - hasSameElements(List(Some(Chunk(s"${firstDocument.stringField}")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlights and return highlights map successfully") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) - ) - items <- res.items - } yield assert(items.map(_.highlights))( - hasSameElements(List(Some(Map("stringField" -> Chunk(s"${firstDocument.stringField}"))))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlight while using global config") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest - .search(firstSearchIndex, query) - .highlights( - highlight(TestDocument.stringField) - .withGlobalConfig("pre_tags", Arr(Str("
    "))) - .withGlobalConfig("post_tags", Arr(Str("
"))) - ) - ) - items <- res.items - } yield assert(items.map(_.highlight(TestDocument.stringField)))( - hasSameElements(List(Some(Chunk(s"
    ${firstDocument.stringField}
")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlight while using local config to overwrite global config") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest - .search(firstSearchIndex, query) - .highlights( - highlight( - TestDocument.stringField, - config = Map("pre_tags" -> Arr(Str("
    ")), "post_tags" -> Arr(Str("
"))) - ) - .withGlobalConfig("pre_tags", Arr(Str("
    "))) - .withGlobalConfig("post_tags", Arr(Str("
"))) - ) - ) - items <- res.items - } yield assert(items.map(_.highlight(TestDocument.stringField)))( - hasSameElements(List(Some(Chunk(s"
    ${firstDocument.stringField}
")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ), - suite("searching for sorted documents")( - test("search for document sorted by descending age and by ascending birthDate using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - val firstDocumentWithFixedIntField = - firstDocument.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) - val secondDocumentWithFixedIntField = - secondDocument.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocumentWithFixedIntField - ) - .refreshTrue - ) - query = range(TestDocument.intField).gte(20) - res <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .sort( - sortBy(TestDocument.intField).order(Desc), - sortBy(TestDocument.dateField).order(Asc).format("strict_date_optional_time_nanos") - ) - ) - .documentAs[TestDocument] - } yield assert(res)( - equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for document sorted by script where age is ascending using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstEmployee, secondDocumentId, secondEmployee) => - val firstDocumentWithFixedIntField = - firstEmployee.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) - val secondDocumentWithFixedIntField = - secondEmployee.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocumentWithFixedIntField - ) - .refreshTrue - ) - query = range(TestDocument.intField).gte(20) - res <- - Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .sort(sortBy(Script("doc['intField'].value").lang(Painless), NumberType).order(Asc)) - ) - .documentAs[TestDocument] - } yield assert(res)( - equalTo(Chunk(firstDocumentWithFixedIntField, secondDocumentWithFixedIntField)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for document sorted descending with 'max' mode by one field using matchAll query") { - checkOnce(genDocumentId, genTestSubDocument, genDocumentId, genTestSubDocument) { - (firstDocumentId, firstSubDocument, secondDocumentId, secondSubDocument) => - val firstSubDocumentWithFixedIntList = firstSubDocument.copy(intFieldList = List(11, 4, 37)) - val secondSubDocumentWithFixedIntList = secondSubDocument.copy(intFieldList = List(30, 29, 35)) - for { - _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestSubDocument](firstSearchIndex, firstDocumentId, firstSubDocumentWithFixedIntList) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestSubDocument]( - firstSearchIndex, - secondDocumentId, - secondSubDocumentWithFixedIntList - ) - .refreshTrue - ) - query = matchAll - res <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .sort(sortBy(TestSubDocument.intFieldList).mode(Max).order(Desc)) - ) - .documentAs[TestSubDocument] - } yield assert(res)( - equalTo(Chunk(firstSubDocumentWithFixedIntList, secondSubDocumentWithFixedIntList)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents using scroll API and returning them as a stream")( - test("search for documents using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) - } yield assert(res)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for documents using range query with multiple pages") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 203).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .stream( - ElasticRequest.search(secondSearchIndex, query) - ) - .run(sink) - } yield assert(res)(hasSize(equalTo(204))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for documents using range query with multiple pages and return type") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = - ZSink.collectAll[TestDocument] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .streamAs[TestDocument](ElasticRequest.search(secondSearchIndex, query)) - 
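// Sketch: without a StreamConfig argument, `Executor.stream`/`streamAs` page through hits
// with the scroll API (this suite's title), so the 200+ creates above arrive across several
// pages behind a single ZStream:
//
//   Executor.streamAs[TestDocument](ElasticRequest.search(secondSearchIndex, query)).run(sink)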
.run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for documents using range query - empty stream") { - val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) - } yield assert(res)(hasSize(equalTo(0))) - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents using PIT (point in time) and returning them as a stream")( - test("successfully create PIT and return stream results") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = - ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) - .run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test( - "successfully create PIT and return stream results with changed page size and different keep alive parameters" - ) { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = - ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .stream( - ElasticRequest.search(secondSearchIndex, query), - StreamConfig.SearchAfter.withPageSize(40).keepAliveFor("2m") - ) - .run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("successfully create PIT (point in time) and return stream results as specific type") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = - ZSink.collectAll[TestDocument] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .streamAs[TestDocument]( - 
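// Note: StreamConfig.SearchAfter switches pagination to a point in time plus search_after;
// page size and PIT keep-alive are tunable, as tested above (`request` is a placeholder):
//
//   Executor.stream(request, StreamConfig.SearchAfter.withPageSize(40).keepAliveFor("2m"))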
ElasticRequest.search(secondSearchIndex, query), - StreamConfig.SearchAfter - ) - .run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("successfully create point in time and return empty stream if there is no valid results") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = - ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(200.0) - res <- Executor - .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) - .run(sink) - } yield assert(res)(isEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents using SearchAfter Query")( - test("search for document sorted by ascending age while using search after query") { - checkOnce(genTestDocument) { firstDocument => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - reqs = (0 to 100).map { i => - ElasticRequest.create[TestDocument]( - firstSearchIndex, - firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.intField).gte(10) - res <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - sa <- res.lastSortValue - res2 <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .searchAfter(sa.get) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - .documentAs[TestDocument] - } yield assert(res2.map(_.intField))( - equalTo(Chunk.fromIterable(20 to 29)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ shrinks(0) - ), - suite("deleting by query")( - test("successfully delete all matched documents") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - deleteByQueryIndex, - firstDocumentId, - firstDocument.copy(doubleField = 150) - ) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - deleteByQueryIndex, - secondDocumentId, - secondDocument.copy(doubleField = 350) - ) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - deleteByQueryIndex, - thirdDocumentId, - thirdDocument.copy(doubleField = 400) - ) - .refreshTrue - ) - deleteQuery = range(TestDocument.doubleField).gte(300.0) - _ <- Executor - .execute(ElasticRequest.deleteByQuery(deleteByQueryIndex, deleteQuery).refreshTrue) - res <- Executor - .execute(ElasticRequest.search(deleteByQueryIndex, matchAll)) - .documentAs[TestDocument] - } yield 
assert(res)(hasSameElements(List(firstDocument.copy(doubleField = 150)))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(deleteByQueryIndex)), - Executor.execute(ElasticRequest.deleteIndex(deleteByQueryIndex)).orDie - ), - test("returns NotFound when provided index is missing") { - checkOnce(genIndexName) { missingIndex => - assertZIO(Executor.execute(ElasticRequest.deleteByQuery(missingIndex, matchAll)))( - equalTo(DeletionOutcome.NotFound) - ) - } - } - ), - suite("bulk query")( - test("successfully execute bulk query") { - checkOnce(genDocumentId, genDocumentId, genDocumentId, genTestDocument) { - (firstDocumentId, secondDocumentId, thirdDocumentId, document) => - for { - _ <- Executor.execute( - ElasticRequest - .create[TestDocument](index, firstDocumentId, document.copy(stringField = "randomIdString")) - ) - _ <- - Executor.execute( - ElasticRequest - .create[TestDocument](index, secondDocumentId, document.copy(stringField = "randomIdString2")) - .refreshTrue - ) - req1 = ElasticRequest.create[TestDocument](index, thirdDocumentId, document) - req2 = ElasticRequest.create[TestDocument](index, document.copy(stringField = "randomIdString3")) - req3 = ElasticRequest.upsert[TestDocument](index, firstDocumentId, document.copy(doubleField = 3000)) - req4 = ElasticRequest.deleteById(index, secondDocumentId) - req5 = ElasticRequest.update[TestDocument](index, thirdDocumentId, document.copy(intField = 100)) - req6 = ElasticRequest.updateByScript( - index, - firstDocumentId, - Script("ctx._source.intField = params['factor']").params("factor" -> 100) - ) - req7 = - ElasticRequest - .update[TestDocument](index, DocumentId("invalid-document-id"), document.copy(intField = 100)) - res <- - Executor.execute(ElasticRequest.bulk(req1, req2, req3, req4, req5, req6, req7).refreshTrue) - doc1 <- Executor.execute(ElasticRequest.getById(index, firstDocumentId)).documentAs[TestDocument] - doc2 <- Executor.execute(ElasticRequest.getById(index, secondDocumentId)).documentAs[TestDocument] - doc3 <- Executor.execute(ElasticRequest.getById(index, thirdDocumentId)).documentAs[TestDocument] - } yield assert(res.items.size)(equalTo(7)) && - assert(res.items.map(_.error.isDefined))( - equalTo(Chunk(false, false, false, false, false, false, true)) - ) && - assert(res.items(6).status)(equalTo(Some(404))) && - assert(res.items(6).error.map(_.`type`))(equalTo(Some("document_missing_exception"))) && - assert(doc3)(isSome(equalTo(document.copy(intField = 100)))) && - assert(doc2)(isNone) && assert(doc1)( - isSome(equalTo(document.copy(doubleField = 3000, intField = 100))) - ) - } - } - ), - suite("updating document")( - test("successfully update document with script") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val intField = document.intField - val factor = 2 - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - _ <- Executor.execute( - ElasticRequest.updateByScript( - index, - documentId, - Script("ctx._source.intField += params['factor']").params("factor" -> factor) - ) - ) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(document.copy(intField = intField + factor)))) - } - }, - test("successfully create document if it does not exist") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute( - ElasticRequest - .updateByScript( - index, - documentId, - Script("ctx._source.intField += 
params['factor']").params("factor" -> 2) - ) - .orCreate(document) - ) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(document))) - } - }, - test("successfully update document with doc") { - checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) - _ <- Executor.execute(ElasticRequest.update[TestDocument](index, documentId, secondDocument)) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(secondDocument))) - } - } - ), - suite("updating document by query")( - test("successfully update document with only script") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val stringField = "StringField" - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, document).refreshTrue - ) - updateRes <- - Executor.execute( - ElasticRequest - .updateAllByQuery( - updateByQueryIndex, - Script("ctx._source['stringField'] = params['str']").params("str" -> stringField) - ) - .refreshTrue - ) - doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument] - } yield assert(updateRes)( - equalTo( - UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0) - ) - ) && assert(doc)(isSome(equalTo(document.copy(stringField = stringField)))) - } - }, - test("successfully update document with script and query") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val newDocument = document.copy(stringField = "StringField") - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, newDocument).refreshTrue - ) - updateRes <- - Executor.execute( - ElasticRequest - .updateByQuery( - index = updateByQueryIndex, - query = term(field = TestDocument.stringField.keyword, value = "StringField"), - script = Script("ctx._source['intField']++") - ) - .refreshTrue - ) - doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument] - } yield assert(updateRes)( - equalTo( - UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0) - ) - ) && assert(doc)(isSome(equalTo(newDocument.copy(intField = newDocument.intField + 1)))) - } - } - ), - suite("geo-distance query")( - test("using geo-distance query") { - checkOnce(genTestDocument) { document => - val indexDefinition = - """ - |{ - | "mappings": { - | "properties": { - | "geoPointField": { - | "type": "geo_point" - | } - | } - | } - |} - |""".stripMargin - - for { - _ <- Executor.execute(ElasticRequest.createIndex(geoDistanceIndex, indexDefinition)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(geoDistanceIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](geoDistanceIndex, document).refreshTrue - ) - result <- Executor - .execute( - ElasticRequest.search( - geoDistanceIndex, - ElasticQuery - .geoDistance( - "geoPointField", - GeoPoint(document.geoPointField.lat, document.geoPointField.lon), - Distance(300, Kilometers) - ) - ) - ) - 
.documentAs[TestDocument] - } yield assert(result)(equalTo(Chunk(document))) - } - } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoDistanceIndex)).orDie) - ), - suite("geo-polygon query")( - test("using geo-polygon query") { - checkOnce(genTestDocument) { document => - val indexDefinition = - """ - |{ - | "mappings": { - | "properties": { - | "geoPointField": { - | "type": "geo_point" - | } - | } - | } - |} - |""".stripMargin - - for { - _ <- Executor.execute(ElasticRequest.createIndex(geoPolygonIndex, indexDefinition)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(geoPolygonIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](geoPolygonIndex, document).refreshTrue - ) - - r1 <- Executor - .execute( - ElasticRequest.search( - geoPolygonIndex, - ElasticQuery - .geoPolygon("geoPointField", Chunk("0, 0", "0, 90", "90, 90", "90, 0")) - ) - ) - .documentAs[TestDocument] - } yield assert(r1)(equalTo(Chunk(document))) - } - } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoPolygonIndex)).orDie) - ), - suite("search for documents using FunctionScore query")( - test("using randomScore function") { - checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) => - val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue - ) - _ <- Executor.execute( - ElasticRequest - .create[TestDocument]( - firstSearchIndex, - secondDocumentUpdated - ) - .refreshTrue - ) - r1 <- Executor - .execute( - ElasticRequest.search( - firstSearchIndex, - ElasticQuery - .functionScore(randomScoreFunction()) - .query(matches("stringField", firstDocument.stringField)) - ) - ) - .documentAs[TestDocument] - } yield assert(r1)( - hasSameElements(Chunk(firstDocument, secondDocumentUpdated)) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("using randomScore function and weight function") { - checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) => - val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue - ) - _ <- Executor.execute( - ElasticRequest - .create[TestDocument]( - firstSearchIndex, - secondDocumentUpdated - ) - .refreshTrue - ) - r1 <- Executor - .execute( - ElasticRequest.search( - firstSearchIndex, - ElasticQuery - .functionScore( - FunctionScoreFunction.randomScoreFunction(), - FunctionScoreFunction.weightFunction(2) - ) - .query(matches("stringField", firstDocument.stringField)) - .boost(2.0) - .boostMode(FunctionScoreBoostMode.Max) - ) - ) - .documentAs[TestDocument] - } yield assert(r1)( - hasSameElements(Chunk(firstDocument, secondDocumentUpdated)) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) - ) @@ nondeterministic @@ sequential 
@@ prepareElasticsearchIndexForTests @@ afterAll( - Executor.execute(ElasticRequest.deleteIndex(index)).orDie - ) - ).provideShared( - elasticsearchLayer - ) - } -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package zio.elasticsearch + +import zio.Chunk +import zio.elasticsearch.ElasticAggregation._ +import zio.elasticsearch.ElasticHighlight.highlight +import zio.elasticsearch.ElasticQuery.{script => _, _} +import zio.elasticsearch.ElasticSort.sortBy +import zio.elasticsearch.aggregation.{AggregationOrder, IpRange} +import zio.elasticsearch.data.GeoPoint +import zio.elasticsearch.domain.{PartialTestDocument, TestDocument, TestSubDocument} +import zio.elasticsearch.executor.Executor +import zio.elasticsearch.query.DistanceUnit.Kilometers +import zio.elasticsearch.query.FunctionScoreFunction.randomScoreFunction +import zio.elasticsearch.query.MultiMatchType._ +import zio.elasticsearch.query.sort.SortMode.Max +import zio.elasticsearch.query.sort.SortOrder._ +import zio.elasticsearch.query.sort.SourceType.NumberType +import zio.elasticsearch.query.{Distance, FunctionScoreBoostMode, FunctionScoreFunction, InnerHits} +import zio.elasticsearch.request.{CreationOutcome, DeletionOutcome} +import zio.elasticsearch.result.{FilterAggregationResult, Item, MaxAggregationResult, UpdateByQueryResult} +import zio.elasticsearch.script.{Painless, Script} +import zio.json.ast.Json.{Arr, Str} +import zio.schema.codec.JsonCodec +import zio.stream.{Sink, ZSink} +import zio.test.Assertion._ +import zio.test.TestAspect._ +import zio.test._ + +import java.time.LocalDate +import scala.util.Random + +object HttpExecutorSpec extends IntegrationSpec { + + def spec: Spec[TestEnvironment, Any] = { + suite("Executor")( + suite("HTTP Executor")( + suite("aggregation")( + test("aggregate using avg aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(doubleField = 20)) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(doubleField = 10)) + .refreshTrue + ) + aggregation = avgAggregation(name = "aggregationDouble", field = TestDocument.doubleField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asAvgAggregation("aggregationDouble") + } yield assert(aggsRes.head.value)(equalTo(15.0)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using cardinality aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, 
secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 10)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 20)) + .refreshTrue + ) + aggregation = cardinalityAggregation(name = "aggregationInt", field = TestDocument.intField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + + cardinalityAgg <- aggsRes.asCardinalityAggregation("aggregationInt") + } yield assert(cardinalityAgg.map(_.value))(isSome(equalTo(2))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using extended stats aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 100)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 50)) + .refreshTrue + ) + aggregation = extendedStatsAggregation(name = "aggregation", field = TestDocument.intField).sigma(3) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asExtendedStatsAggregation("aggregation") + } yield assert(aggsRes.head.count)(equalTo(2)) && + assert(aggsRes.head.min)(equalTo(50.0)) && + assert(aggsRes.head.max)(equalTo(100.0)) && + assert(aggsRes.head.avg)(equalTo(75.0)) && + assert(aggsRes.head.sum)(equalTo(150.0)) && + assert(aggsRes.head.sumOfSquares)(equalTo(12500.0)) && + assert(aggsRes.head.variance)(equalTo(625.0)) && + assert(aggsRes.head.variancePopulation)(equalTo(625.0)) && + assert(aggsRes.head.varianceSampling)(equalTo(1250.0)) && + assert(aggsRes.head.stdDeviation)(equalTo(25.0)) && + assert(aggsRes.head.stdDeviationPopulation)(equalTo(25.0)) && + assert(aggsRes.head.stdDeviationSampling)(equalTo(35.35533905932738)) && + assert(aggsRes.head.stdDeviationBoundsResult.upper)(equalTo(150.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.lower)(equalTo(0.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.upperPopulation)(equalTo(150.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.lowerPopulation)(equalTo(0.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.upperSampling)(equalTo(181.06601717798213)) && + assert(aggsRes.head.stdDeviationBoundsResult.lowerSampling)(equalTo(-31.066017177982133)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using filter aggregation with max aggregation as a sub aggregation") { + val expectedResult = ( + "aggregation", + FilterAggregationResult( + docCount = 2, + subAggregations = Map( + "subAggregation" -> MaxAggregationResult(value = 5.0) + ) + ) + ) + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, 
thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = firstDocument.copy(stringField = "test", intField = 7) + secondDocumentUpdated = + secondDocument.copy(stringField = "filterAggregation", intField = 3) + thirdDocumentUpdated = + thirdDocument.copy(stringField = "filterAggregation", intField = 5) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument]( + firstSearchIndex, + firstDocumentId, + firstDocumentUpdated + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocumentUpdated + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + thirdDocumentId, + thirdDocumentUpdated + ) + .refreshTrue + ) + query = term(field = TestDocument.stringField, value = secondDocumentUpdated.stringField.toLowerCase) + aggregation = + filterAggregation(name = "aggregation", query = query).withSubAgg( + maxAggregation("subAggregation", TestDocument.intField) + ) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .aggregations + + } yield assert(aggsRes.head)(equalTo(expectedResult)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using max aggregation") { + val expectedResponse = ("aggregationInt", MaxAggregationResult(value = 20.0)) + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 20)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 10)) + .refreshTrue + ) + aggregation = maxAggregation(name = "aggregationInt", field = TestDocument.intField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .aggregations + } yield assert(aggsRes.head)(equalTo(expectedResponse)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using min aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23)) + .refreshTrue + ) + aggregation = minAggregation(name = "aggregationInt", field = TestDocument.intField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asMinAggregation("aggregationInt") + } yield assert(aggsRes.head.value)(equalTo(23.0)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + 
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using percentile ranks aggregation") { + val expectedResult = Map("500.0" -> 55.55555555555555, "600.0" -> 100.0) + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 400)) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 500)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 550)) + .refreshTrue + ) + aggregation = + percentileRanksAggregation(name = "aggregation", field = "intField", value = 500.0, values = 600.0) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asPercentileRanksAggregation("aggregation") + } yield assert(aggsRes.head.values)(equalTo(expectedResult)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asPercentilesAggregation("aggregationInt") + } yield assert(aggsRes.head.values.size)(equalTo(3)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation with multi index") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) + aggsRes <- Executor + .execute( + ElasticRequest.aggregate( + selectors = MultiIndex.names(firstSearchIndex, secondSearchIndex), + aggregation = aggregation + ) + ) + .asPercentilesAggregation("aggregationInt") + } yield assert(aggsRes.head.values.size)(equalTo(3)) + } + } @@ 
around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation with index pattern") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) + aggsRes <- Executor + .execute( + ElasticRequest.aggregate( + selectors = IndexPatternAll, + aggregation = aggregation + ) + ) + .asPercentilesAggregation("aggregationInt") + } yield assert(aggsRes.head.values.size)(equalTo(3)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation as sub aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + termsAggregation(name = "first", field = TestDocument.stringField.keyword) + .withSubAgg(percentilesAggregation(name = "second", field = TestSubDocument.intField)) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .aggregations + } yield assert(aggsRes)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using stats aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 7)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 6)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 10)) + .refreshTrue + ) + aggregation = statsAggregation(name = "aggregation", field = 
TestDocument.intField)
+            aggsRes <-
+              Executor
+                .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                .asStatsAggregation("aggregation")
+          } yield assert(aggsRes.head.count)(equalTo(3)) &&
+            assert(aggsRes.head.min)(equalTo(6.0)) &&
+            assert(aggsRes.head.max)(equalTo(10.0)) &&
+            assert(aggsRes.head.avg)(equalTo(7.666666666666667)) &&
+            assert(aggsRes.head.sum)(equalTo(23.0))
+        }
+      } @@ around(
+        Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+        Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+      ),
+      test("aggregate using sum aggregation") {
+        checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+          (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200))
+                   )
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23))
+                       .refreshTrue
+                   )
+              aggregation = sumAggregation(name = "aggregationInt", field = TestDocument.intField)
+              aggsRes <-
+                Executor
+                  .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                  .asSumAggregation("aggregationInt")
+            } yield assert(aggsRes.head.value)(equalTo(223.0))
+        }
+      } @@ around(
+        Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+        Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+      ),
+      test("aggregate using ip range aggregation") {
+        checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docIdA, docA, docIdB, docB) =>
+          val updatedA = docA.copy(stringField = "192.168.1.10")
+          val updatedB = docB.copy(stringField = "192.168.1.200")
+
+          for {
+            _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+            _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, updatedA))
+            _ <- Executor.execute(
+                   ElasticRequest
+                     .upsert[TestDocument](firstSearchIndex, docIdB, updatedB)
+                     .refreshTrue
+                 )
+            aggregation = ipRangeAggregation(
+                            name = "ip_range_agg",
+                            field = "ipField",
+                            ranges = Chunk(
+                              IpRange.IpRangeBound(to = Some("192.168.1.100")),
+                              IpRange.IpRangeBound(
+                                from = Some("192.168.1.100"),
+                                to = Some("192.168.1.255")
+                              )
+                            )
+                          )
+            result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+            agg    <- result.aggregation("ip_range_agg")
+          } yield assertTrue(agg.nonEmpty)
+        }
+      } @@ around(
+        Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+        Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+      ),
+      test("aggregate using ip range aggregation with CIDR masks") {
+        checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docId1, doc1, docId2, doc2) =>
+          val updated1 = doc1.copy(stringField = "10.0.0.10")
+          val updated2 = doc2.copy(stringField = "10.0.0.120")
+
+          for {
+            _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+            _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docId1, updated1))
+            _ <- Executor.execute(
+                   ElasticRequest
+                     .upsert[TestDocument](firstSearchIndex, docId2, updated2)
+                     .refreshTrue
+                 )
+            aggregation = ipRangeAggregation(
+                            name = "cidr_agg",
+                            field = "ipField",
+                            ranges = Chunk(
+                              IpRange.IpRangeBound(mask = Some("10.0.0.0/25")),
+                              IpRange.IpRangeBound(mask = Some("10.0.0.128/25"))
+                            )
+                          )
+            result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+            agg    <- result.aggregation("cidr_agg")
+          } yield assertTrue(agg.nonEmpty)
+        }
+      } @@ around(
+        Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+        Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+      ),
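+      // The remaining knobs on the new IpRangeAggregation API (`keyed` and `withSubAgg`, both
+      // defined on it in this patch) can be chained onto the constructor. A minimal sketch,
+      // with purely illustrative name, field and range values:
+      //
+      //   ipRangeAggregation(
+      //     name = "keyedIpRangeAgg",
+      //     field = "ipField",
+      //     ranges = Chunk(IpRange.IpRangeBound(to = Some("10.0.0.128")))
+      //   ).keyed(true)
+      //     .withSubAgg(maxAggregation(name = "maxIntSubAgg", field = TestDocument.intField))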
+      test("aggregate using terms aggregation with max aggregation as a sub aggregation") {
+        checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+          (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                   )
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                       .refreshTrue
+                   )
+              aggregation =
+                termsAggregation(name = "aggregationString", field = TestDocument.stringField.keyword).withSubAgg(
+                  maxAggregation("subAggregation", TestDocument.intField)
+                )
+              aggsRes <-
+                Executor
+                  .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                  .aggregations
+            } yield assert(aggsRes)(isNonEmpty)
+        }
+      } @@ around(
+        Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+        Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+      ),
+      test("aggregate using missing aggregations") {
+        checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+          (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                   )
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                       .refreshTrue
+                   )
+              aggregation = multipleAggregations.aggregations(
+                              missingAggregation(
+                                name = "aggregationString",
+                                field = TestDocument.stringField.keyword
+                              ),
+                              missingAggregation(name = "aggregationString", field = "stringField.keyword")
+                            )
+              aggsRes <- Executor
+                           .execute(
+                             ElasticRequest
+                               .aggregate(selectors = firstSearchIndex, aggregation = aggregation)
+                           )
+                           .aggregations
+            } yield assert(aggsRes)(isNonEmpty)
+        }
+      } @@ around(
+        Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+        Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+      ),
+      test("aggregate using multiple terms aggregations") {
+        checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+          (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                   )
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                       .refreshTrue
+                   )
+              aggregation = multipleAggregations.aggregations(
+                              termsAggregation(
+                                name = "aggregationString",
+                                field = TestDocument.stringField.keyword
+                              ),
+                              termsAggregation(name = "aggregationInt", field = "intField.keyword")
+                            )
+              aggsRes <- Executor
+                           .execute(
+                             ElasticRequest
+                               .aggregate(selectors = firstSearchIndex, aggregation = aggregation)
+                           )
+                           .aggregations
+
} yield assert(aggsRes)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using terms aggregation with nested max aggregation and bucket sort aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 2)) + .refreshTrue + ) + aggregation = + termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).orderBy(AggregationOrder("aggregationInt", Desc)) + .withSubAgg(maxAggregation(name = "aggregationInt", field = "intField")) + .withSubAgg( + bucketSortAggregation("aggregationBucket").sort( + ElasticSort.sortBy("aggregationInt").order(Desc) + ) + ) + .size(1) + aggsRes <- Executor + .execute( + ElasticRequest + .aggregate(selectors = firstSearchIndex, aggregation = aggregation) + ) + agg <- aggsRes.asTermsAggregation("aggregationString") + } yield assert(agg.map(_.buckets.size))(isSome(equalTo(1))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using value count aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + firstDocumentId, + firstDocument.copy(stringField = "test") + ) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocument.copy(stringField = "test") + ) + .refreshTrue + ) + aggregation = valueCountAggregation(name = "aggregation", field = TestDocument.stringField.keyword) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asValueCountAggregation("aggregation") + + } yield assert(aggsRes.head.value)(equalTo(2)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using weighted avg aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + firstDocumentId, + firstDocument.copy(doubleField = 5, intField = 2) + ) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocument.copy(doubleField = 10, intField = 3) + ) + .refreshTrue + ) + aggregation = weightedAvgAggregation( + name = "weightedAggregation", + valueField = TestDocument.doubleField, + weightField = TestDocument.intField + ) + aggsRes <- + Executor + 
.execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asWeightedAvgAggregation("weightedAggregation") + } yield assert(aggsRes.head.value)(equalTo(8.0)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ), + suite("search with aggregation")( + test("search for first result using match all query with multiple terms aggregations") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchAll + aggregation = termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withAgg(termsAggregation("aggregationInt", "intField")) + res <- Executor.execute( + ElasticRequest + .search( + selectors = firstSearchIndex, + query = query, + aggregation = aggregation + ) + .from(0) + .size(1) + ) + docs <- res.documentAs[TestDocument] + aggs <- res.aggregations + } yield assert(docs.length)(equalTo(1)) && assert(aggs)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test( + "search for first result using match all query with multiple terms aggregations and search after parameter" + ) { + checkOnce(genTestDocument) { firstDocument => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + reqs = (0 to 20).map { i => + ElasticRequest.create[TestDocument]( + firstSearchIndex, + firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = matchAll + aggregation = termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withAgg(termsAggregation("aggregationInt", "intField")) + res <- Executor + .execute( + ElasticRequest + .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + sa <- res.lastSortValue + res2 <- Executor + .execute( + ElasticRequest + .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) + .searchAfter(sa.get) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + docs <- res2.documentAs[TestDocument] + aggs <- res2.aggregations + } yield assert(docs.length)(equalTo(10)) && assert(aggs)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search using match all query with multiple terms aggregations with descending sort on one field") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + val firstDocumentWithFixedIntField = firstDocument.copy(intField = 25) + val secondDocumentWithFixedIntField = secondDocument.copy(intField = 32) + for { + _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentWithFixedIntField) + .refreshTrue + ) + query = matchAll + aggregation = + termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withAgg(termsAggregation("aggregationInt", "intField.keyword")) + res <- Executor.execute( + ElasticRequest + .search( + selectors = firstSearchIndex, + query = query, + aggregation = aggregation + ) + .sort(sortBy(field = TestDocument.intField).order(Desc)) + ) + docs <- res.documentAs[TestDocument] + aggs <- res.aggregations + } yield assert(docs)(equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField))) && + assert(aggs)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test( + "search using match all query with terms aggregations, nested max aggregation and nested bucketSelector aggregation" + ) { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 100)) + .refreshTrue + ) + query = matchAll + aggregation = + termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withSubAgg(maxAggregation(name = "aggregationInt", field = TestDocument.intField)) + .withSubAgg( + bucketSelectorAggregation( + name = "aggregationSelector", + script = Script("params.aggregation_int > 10"), + bucketsPath = Map("aggregation_int" -> "aggregationInt") + ) + ) + res <- Executor.execute( + ElasticRequest + .search( + selectors = firstSearchIndex, + query = query, + aggregation = aggregation + ) + ) + docs <- res.documentAs[TestDocument] + termsAgg <- res.asTermsAggregation("aggregationString") + } yield assert(docs)(isNonEmpty) && assert( + termsAgg.map(_.buckets.size) + )(isSome(equalTo(1))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("counting documents")( + test("successfully count documents with given query") { + checkOnce(genTestDocument) { document => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstCountIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.create[TestDocument](firstCountIndex, document).refreshTrue + ) + res <- Executor.execute(ElasticRequest.count(firstCountIndex, matchAll)) + } yield assert(res)(equalTo(1)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstCountIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstCountIndex)).orDie + ), + test("successfully count documents without given query") { + checkOnce(genTestDocument) { document => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondCountIndex, matchAll)) + _ <- Executor.execute( + 
ElasticRequest.create[TestDocument](secondCountIndex, document).refreshTrue + ) + res <- Executor.execute(ElasticRequest.count(secondCountIndex)) + } yield assert(res)(equalTo(1)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondCountIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondCountIndex)).orDie + ) + ) @@ shrinks(0), + suite("creating document")( + test("successfully create document") { + checkOnce(genTestDocument) { document => + for { + docId <- Executor.execute(ElasticRequest.create[TestDocument](index, document)) + res <- Executor.execute(ElasticRequest.getById(index, docId)).documentAs[TestDocument] + } yield assert(res)(isSome(equalTo(document))) + } + }, + test("successfully create document with ID given") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + assertZIO(Executor.execute(ElasticRequest.create[TestDocument](index, documentId, document)))( + equalTo(CreationOutcome.Created) + ) + } + }, + test("return 'AlreadyExists' if document with given ID already exists") { + checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) + res <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, secondDocument)) + } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) + } + } + ), + suite("creating index")( + test("successfully create index") { + assertZIO(Executor.execute(ElasticRequest.createIndex(createIndexTestName)))( + equalTo(CreationOutcome.Created) + ) + }, + test("return 'AlreadyExists' if index already exists") { + for { + _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) + res <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) + } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) + } + ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), + suite("creating or updating document")( + test("successfully create document") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(document))) + } + }, + test("successfully update document") { + checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, firstDocument)) + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, secondDocument)) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(secondDocument))) + } + } + ), + suite("deleting document by ID")( + test("successfully delete existing document") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + res <- Executor.execute(ElasticRequest.deleteById(index, documentId)) + } yield assert(res)(equalTo(DeletionOutcome.Deleted)) + } + }, + test("return 'NotFound' if the document does not exist") { + checkOnce(genDocumentId) { documentId => + assertZIO(Executor.execute(ElasticRequest.deleteById(index, documentId)))( + equalTo(DeletionOutcome.NotFound) + ) + } + } + ), + 
suite("delete index")(
+        test("successfully delete existing index") {
+          checkOnce(genIndexName) { name =>
+            for {
+              _   <- Executor.execute(ElasticRequest.createIndex(name))
+              res <- Executor.execute(ElasticRequest.deleteIndex(name))
+            } yield assert(res)(equalTo(DeletionOutcome.Deleted))
+          }
+        },
+        test("return 'NotFound' if index does not exist") {
+          checkOnce(genIndexName) { name =>
+            assertZIO(Executor.execute(ElasticRequest.deleteIndex(name)))(equalTo(DeletionOutcome.NotFound))
+          }
+        }
+      ),
+      suite("finding document")(
+        test("return true if the document exists") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            for {
+              _   <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document))
+              res <- Executor.execute(ElasticRequest.exists(index, documentId))
+            } yield assert(res)(isTrue)
+          }
+        },
+        test("return false if the document does not exist") {
+          checkOnce(genDocumentId) { documentId =>
+            assertZIO(Executor.execute(ElasticRequest.exists(index, documentId)))(isFalse)
+          }
+        }
+      ),
+      suite("retrieving document by ID")(
+        test("successfully return document") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            for {
+              _   <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document))
+              res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument]
+            } yield assert(res)(isSome(equalTo(document)))
+          }
+        },
+        test("return None if the document does not exist") {
+          checkOnce(genDocumentId) { documentId =>
+            assertZIO(Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument])(isNone)
+          }
+        },
+        test("fail with throwable if decoding fails") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            val result = for {
+              _   <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document))
+              res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestSubDocument]
+            } yield res
+
+            assertZIO(result.exit)(
+              fails(isSubtype[Exception](assertException("Could not parse the document: .nestedField(missing)")))
+            )
+          }
+        }
+      ),
+      suite("refresh index")(
+        test("successfully refresh existing index") {
+          assertZIO(Executor.execute(ElasticRequest.refresh(index)))(isTrue)
+        },
+        test("successfully refresh multiple existing indices") {
+          for {
+            _   <- Executor.execute(ElasticRequest.createIndex(createIndexTestName))
+            res <- Executor.execute(ElasticRequest.refresh(MultiIndex.names(index, createIndexTestName)))
+          } yield assert(res)(isTrue)
+        },
+        test("successfully refresh all indices") {
+          assertZIO(Executor.execute(ElasticRequest.refresh(IndexPatternAll)))(isTrue)
+        },
+        test("return false if index does not exist") {
+          assertZIO(Executor.execute(ElasticRequest.refresh(refreshFailIndex)))(isFalse)
+        }
+      ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie),
+      suite("retrieving document by IDs")(
+        test("find documents by ids") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                _ <- Executor.execute(
+                       ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                     )
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                         .refreshTrue
+                     )
+                query = ids(firstDocumentId.toString, secondDocumentId.toString)
+                res <-
+                  Executor.execute(
+                    ElasticRequest.search(firstSearchIndex, query)
+                  )
+                items <- res.items
+              } yield assert(items)(isNonEmpty)
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        )
+      ),
+      suite("kNN search")(
+        test("search for top two results") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                firstDocumentUpdated  = firstDocument.copy(vectorField = List(1, 5, -20))
+                secondDocumentUpdated = secondDocument.copy(vectorField = List(42, 8, -15))
+                thirdDocumentUpdated  = thirdDocument.copy(vectorField = List(15, 11, 23))
+                req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated)
+                req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated)
+                req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated)
+                _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue)
+                query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0))
+                res <- Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, query)).documentAs[TestDocument]
+              } yield assert(res)(equalTo(Chunk(firstDocumentUpdated, thirdDocumentUpdated)))
+          }
+        } @@ around(
+          Executor.execute(
+            ElasticRequest.createIndex(
+              firstSearchIndex,
+              """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }"""
+            )
+          ),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("search for top two results with filters") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                firstDocumentUpdated  = firstDocument.copy(intField = 15, vectorField = List(1, 5, -20))
+                secondDocumentUpdated = secondDocument.copy(intField = 21, vectorField = List(42, 8, -15))
+                thirdDocumentUpdated  = thirdDocument.copy(intField = 4, vectorField = List(15, 11, 23))
+                req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated)
+                req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated)
+                req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated)
+                _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue)
+                query  = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0))
+                filter = ElasticQuery.range(TestDocument.intField).gt(10)
+                res <- Executor
+                         .execute(ElasticRequest.knnSearch(firstSearchIndex, query).filter(filter))
+                         .documentAs[TestDocument]
+              } yield assert(res)(equalTo(Chunk(firstDocumentUpdated, secondDocumentUpdated)))
+          }
+        } @@ around(
+          Executor.execute(
+            ElasticRequest.createIndex(
+              firstSearchIndex,
+              """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }"""
+            )
+          ),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        )
+      ) @@ shrinks(0),
+      suite("searching for documents")(
+        test("search for a document using a 
boosting query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"this is a ${firstDocument.stringField} test", intField = 7) + secondDocumentUpdated = + secondDocument.copy( + stringField = s"this is another ${secondDocument.stringField} test", + intField = 5 + ) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + .refreshTrue + ) + query = boosting( + negativeBoost = 0.1f, + negativeQuery = + term(field = TestDocument.stringField, value = firstDocument.stringField.toLowerCase), + positiveQuery = matchPhrase( + field = TestDocument.stringField, + value = "test" + ) + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated)))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a constant score query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = "this is a test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = constantScore( + matchPhrase( + field = TestDocument.stringField, + value = "test" + ) + ).boost(2.1) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for first 2 documents using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query).from(0).size(2)) + .documentAs[TestDocument] + } yield assert(res.length)(equalTo(2)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + 
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for first 2 documents using range query with date format") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + val firstDocumentUpdated = firstDocument.copy(dateField = LocalDate.now.minusDays(2)) + val secondDocumentUpdated = secondDocument.copy(dateField = LocalDate.now) + val thirdDocumentUpdated = thirdDocument.copy(dateField = LocalDate.now.plusDays(2)) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) + .refreshTrue + ) + query = range(TestDocument.dateField).gte(LocalDate.now).format("yyyy-MM-dd").boost(1.0) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, thirdDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for documents with source filtering") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query).includes[PartialTestDocument]) + items <- res.items + } yield assert(items.map(item => Right(item.raw)))( + hasSameElements( + List(firstDocument, secondDocument, thirdDocument).map(document => + TestDocument.schema.migrate(PartialTestDocument.schema).flatMap(_(document)).flatMap { + partialDocument => + JsonCodec.jsonEncoder(PartialTestDocument.schema).toJsonAST(partialDocument) + } + ) + ) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("fail if an excluded source field is attempted to be decoded") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + val result = + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, 
secondDocumentId, secondDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + _ <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query).excludes("intField")) + .documentAs[TestDocument] + } yield () + + assertZIO(result.exit)( + fails( + isSubtype[Exception]( + assertException("Could not parse all documents successfully: .intField(missing)") + ) + ) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("fail if any of the results cannot be decoded") { + checkOnce(genDocumentId, genDocumentId, genTestDocument, genTestSubDocument) { + (documentId, subDocumentId, document, subDocument) => + val result = + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](secondSearchIndex, documentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestSubDocument](secondSearchIndex, subDocumentId, subDocument) + .refreshTrue + ) + query = range(TestDocument.intField).gte(0) + res <- Executor.execute(ElasticRequest.search(secondSearchIndex, query)).documentAs[TestDocument] + } yield res + + assertZIO(result.exit)( + fails( + isSubtype[Exception]( + assertException("Could not parse all documents successfully: .dateField(missing)") + ) + ) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for a document which contains a specific prefix using a prefix query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.prefix( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.take(3) + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a disjunction max query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"This is a ${firstDocument.stringField} test.") + secondDocumentUpdated = + secondDocument.copy(stringField = + s"This is a ${secondDocument.stringField} test. 
It should be in the list before ${firstDocument.stringField}, because it has higher relevance score than ${firstDocument.stringField}" + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + .refreshTrue + ) + query = disjunctionMax( + term( + field = TestDocument.stringField, + value = firstDocument.stringField.toLowerCase + ), + matchPhrase( + field = TestDocument.stringField, + value = secondDocument.stringField + ) + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a fuzzy query") { + checkOnce(genDocumentId, genTestDocument) { (firstDocumentId, firstDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument).refreshTrue + ) + query = ElasticQuery.fuzzy( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.substring(1) + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield { + assert(res)(Assertion.contains(firstDocument)) + } + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document which contains a term using a wildcard query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.contains( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.take(3) + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document which starts with a term using a wildcard query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.startsWith( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.take(3) + ) + res <- Executor + 
.execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document which conforms to a pattern using a wildcard query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = wildcard( + field = TestDocument.stringField.keyword, + value = s"${firstDocument.stringField.take(2)}*${firstDocument.stringField.takeRight(2)}" + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a match all query with index pattern") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + firstDocumentCopy = firstDocument.copy(stringField = "this is test") + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) + .refreshTrue + ) + secondDocumentCopy = secondDocument.copy(stringField = "this is test") + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) + .refreshTrue + ) + query = matchAll + res <- Executor + .execute(ElasticRequest.search(IndexPattern("search-index*"), query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( + Assertion.contains(secondDocumentCopy) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for a document using a match boolean prefix query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = "test this is boolean") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchBooleanPrefix(TestDocument.stringField, "this is test bo") + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, 
query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a match phrase query with multi index") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + firstDocumentCopy = firstDocument.copy(stringField = "this is test") + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) + .refreshTrue + ) + secondDocumentCopy = secondDocument.copy(stringField = "this is test") + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) + .refreshTrue + ) + query = matchPhrase( + field = TestDocument.stringField, + value = firstDocumentCopy.stringField + ) + + res <- Executor + .execute(ElasticRequest.search(MultiIndex.names(firstSearchIndex, secondSearchIndex), query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( + Assertion.contains(secondDocumentCopy) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for a document using a match phrase query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchPhrase( + field = TestDocument.stringField, + value = firstDocument.stringField + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(Assertion.contains(document)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a match phrase prefix query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = s"${firstDocument.stringField} test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchPhrasePrefix( + field = 
TestDocument.stringField, + value = s"${firstDocument.stringField} te" + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a multi match query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = "test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + + query = + multiMatch(value = "test").fields(TestDocument.stringField).matchingType(BestFields) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a terms query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") + secondDocumentUpdated = + secondDocument.copy(stringField = s"this is ${secondDocument.stringField} another test") + _ <- + Executor.execute( + ElasticRequest + .bulk( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + ) + .refreshTrue + ) + query = terms( + field = TestDocument.stringField, + values = firstDocument.stringField.toLowerCase, + secondDocument.stringField.toLowerCase + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(List(firstDocumentUpdated, secondDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a terms set query with minimumShouldMatchField") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) + secondDocumentUpdated = + secondDocument.copy( + stringField = + s"this is ${secondDocument.stringField} another test, not ${firstDocument.stringField}", + 
intField = 2 + ) + _ <- + Executor.execute( + ElasticRequest + .bulk( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + ) + .refreshTrue + ) + query = termsSet( + field = "stringField", + minimumShouldMatchField = "intField", + terms = secondDocument.stringField.toLowerCase, + firstDocument.stringField.toLowerCase + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a terms set query with minimumShouldMatchScript") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) + secondDocumentUpdated = + secondDocument.copy( + stringField = s"this is ${secondDocument.stringField} test, not ${firstDocument.stringField}", + intField = 2 + ) + _ <- + Executor.execute( + ElasticRequest + .bulk( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + ) + .refreshTrue + ) + query = termsSetScript( + field = TestDocument.stringField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = firstDocument.stringField.toLowerCase, + secondDocument.stringField.toLowerCase + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using nested query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = + nested(path = TestDocument.subDocumentList, query = matchAll) + res <- + Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(List(firstDocument, secondDocument))) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using should with satisfying minimumShouldMatch condition") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should( + matches(TestDocument.stringField, firstDocument.stringField), + matches(TestDocument.intField, firstDocument.intField), + matches(TestDocument.doubleField, firstDocument.doubleField + 1) + ).minimumShouldMatch(2) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using script query") { + checkN(4)(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.script(Script("doc['booleanField'].value == true")) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(List(firstDocument, secondDocument).filter(_.booleanField))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document that doesn't exist using regexp query without case insensitivity") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = + ElasticQuery.regexp( + field = TestDocument.stringField, + value = + s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}".toUpperCase + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(!Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using regexp query with case insensitivity") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = 
ElasticQuery + .regexp( + field = TestDocument.stringField, + value = s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}" + ) + .caseInsensitiveTrue + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(firstDocument)) && assert(res)( + !Assertion.contains(secondDocument) + )) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using should with unsatisfying minimumShouldMatch condition") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should( + matches(TestDocument.stringField, firstDocument.stringField), + matches(TestDocument.intField, firstDocument.intField + 1), + matches(TestDocument.doubleField, firstDocument.doubleField + 1) + ).minimumShouldMatch(2) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(isEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents with inner hits")( + test("search for a document using nested query with inner hits") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = + nested(path = TestDocument.subDocumentList, query = matchAll).innerHits + result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) + items <- result.items + res = + items.map(_.innerHitAs[TestSubDocument]("subDocumentList")).collect { case Right(value) => value } + } yield assert(res)( + hasSameElements(List(firstDocument.subDocumentList, secondDocument.subDocumentList)) + ) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents with highlights")( + test("successfully find document with highlight") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, 
secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) + ) + items <- res.items + } yield assert(items.map(_.highlight("stringField")))( + hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find inner hit document with highlight") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = nested( + path = TestDocument.subDocumentList, + query = must( + matches( + TestSubDocument.stringField, + secondDocument.subDocumentList.headOption.map(_.stringField).getOrElse("foo") + ) + ) + ).innerHits( + InnerHits().highlights(highlight(TestSubDocument.stringField)) + ) + result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) + items <- result.items + res = items + .flatMap(_.innerHit("subDocumentList")) + .flatten + .flatMap(_.highlight("subDocumentList.stringField")) + .flatten + } yield assert(res)( + Assertion.contains( + secondDocument.subDocumentList.headOption + .map(doc => s"<em>${doc.stringField}</em>") + .getOrElse("foo") + ) + ) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlight using field accessor") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query).highlights(highlight(TestDocument.stringField)) + ) + items <- res.items + } yield assert(items.map(_.highlight(TestDocument.stringField)))( + hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlights and return highlights map") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, 
firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) + ) + items <- res.items + } yield assert(items.map(_.highlights))( + hasSameElements(List(Some(Map("stringField" -> Chunk(s"<em>${firstDocument.stringField}</em>"))))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlight while using global config") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest + .search(firstSearchIndex, query) + .highlights( + highlight(TestDocument.stringField) + .withGlobalConfig("pre_tags", Arr(Str("<ul>"))) + .withGlobalConfig("post_tags", Arr(Str("</ul>"))) + ) + ) + items <- res.items + } yield assert(items.map(_.highlight(TestDocument.stringField)))( + hasSameElements(List(Some(Chunk(s"<ul>${firstDocument.stringField}</ul>
")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlight while using local config to overwrite global config") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest + .search(firstSearchIndex, query) + .highlights( + highlight( + TestDocument.stringField, + config = Map("pre_tags" -> Arr(Str("
    ")), "post_tags" -> Arr(Str("
"))) + ) + .withGlobalConfig("pre_tags", Arr(Str("
    "))) + .withGlobalConfig("post_tags", Arr(Str("
"))) + ) + ) + items <- res.items + } yield assert(items.map(_.highlight(TestDocument.stringField)))( + hasSameElements(List(Some(Chunk(s"
    ${firstDocument.stringField}
")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ), + suite("searching for sorted documents")( + test("search for document sorted by descending age and by ascending birthDate using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + val firstDocumentWithFixedIntField = + firstDocument.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) + val secondDocumentWithFixedIntField = + secondDocument.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocumentWithFixedIntField + ) + .refreshTrue + ) + query = range(TestDocument.intField).gte(20) + res <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .sort( + sortBy(TestDocument.intField).order(Desc), + sortBy(TestDocument.dateField).order(Asc).format("strict_date_optional_time_nanos") + ) + ) + .documentAs[TestDocument] + } yield assert(res)( + equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for document sorted by script where age is ascending using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstEmployee, secondDocumentId, secondEmployee) => + val firstDocumentWithFixedIntField = + firstEmployee.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) + val secondDocumentWithFixedIntField = + secondEmployee.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocumentWithFixedIntField + ) + .refreshTrue + ) + query = range(TestDocument.intField).gte(20) + res <- + Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .sort(sortBy(Script("doc['intField'].value").lang(Painless), NumberType).order(Asc)) + ) + .documentAs[TestDocument] + } yield assert(res)( + equalTo(Chunk(firstDocumentWithFixedIntField, secondDocumentWithFixedIntField)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for document sorted descending with 'max' mode by one field using matchAll query") { + checkOnce(genDocumentId, genTestSubDocument, genDocumentId, genTestSubDocument) { + (firstDocumentId, firstSubDocument, secondDocumentId, secondSubDocument) => + val firstSubDocumentWithFixedIntList = firstSubDocument.copy(intFieldList = List(11, 4, 37)) + val secondSubDocumentWithFixedIntList = secondSubDocument.copy(intFieldList = List(30, 29, 35)) + for { + _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestSubDocument](firstSearchIndex, firstDocumentId, firstSubDocumentWithFixedIntList) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestSubDocument]( + firstSearchIndex, + secondDocumentId, + secondSubDocumentWithFixedIntList + ) + .refreshTrue + ) + query = matchAll + res <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .sort(sortBy(TestSubDocument.intFieldList).mode(Max).order(Desc)) + ) + .documentAs[TestSubDocument] + } yield assert(res)( + equalTo(Chunk(firstSubDocumentWithFixedIntList, secondSubDocumentWithFixedIntList)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents using scroll API and returning them as a stream")( + test("search for documents using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) + } yield assert(res)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for documents using range query with multiple pages") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 203).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .stream( + ElasticRequest.search(secondSearchIndex, query) + ) + .run(sink) + } yield assert(res)(hasSize(equalTo(204))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for documents using range query with multiple pages and return type") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = + ZSink.collectAll[TestDocument] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .streamAs[TestDocument](ElasticRequest.search(secondSearchIndex, query)) + 
.run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for documents using range query - empty stream") { + val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) + } yield assert(res)(hasSize(equalTo(0))) + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents using PIT (point in time) and returning them as a stream")( + test("successfully create PIT and return stream results") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = + ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) + .run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test( + "successfully create PIT and return stream results with changed page size and different keep alive parameters" + ) { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = + ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .stream( + ElasticRequest.search(secondSearchIndex, query), + StreamConfig.SearchAfter.withPageSize(40).keepAliveFor("2m") + ) + .run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("successfully create PIT (point in time) and return stream results as specific type") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = + ZSink.collectAll[TestDocument] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .streamAs[TestDocument]( + ElasticRequest.search(secondSearchIndex, query), + StreamConfig.SearchAfter + ) + .run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("successfully create point in time and return empty stream if there are no valid results") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = + ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(200.0) + res <- Executor + .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) + .run(sink) + } yield assert(res)(isEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents using SearchAfter Query")( + test("search for document sorted by ascending age while using search after query") { + checkOnce(genTestDocument) { firstDocument => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + reqs = (0 to 100).map { i => + ElasticRequest.create[TestDocument]( + firstSearchIndex, + firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.intField).gte(10) + res <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + sa <- res.lastSortValue + res2 <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .searchAfter(sa.get) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + .documentAs[TestDocument] + } yield assert(res2.map(_.intField))( + equalTo(Chunk.fromIterable(20 to 29)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ shrinks(0) + ), + suite("deleting by query")( + test("successfully delete all matched documents") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + deleteByQueryIndex, + firstDocumentId, + firstDocument.copy(doubleField = 150) + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + deleteByQueryIndex, + secondDocumentId, + secondDocument.copy(doubleField = 350) + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + deleteByQueryIndex, + thirdDocumentId, + thirdDocument.copy(doubleField = 400) + ) + .refreshTrue + ) + deleteQuery = range(TestDocument.doubleField).gte(300.0) + _ <- Executor + .execute(ElasticRequest.deleteByQuery(deleteByQueryIndex, deleteQuery).refreshTrue) + res <- Executor + .execute(ElasticRequest.search(deleteByQueryIndex, matchAll)) + .documentAs[TestDocument] + } yield 
assert(res)(hasSameElements(List(firstDocument.copy(doubleField = 150)))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(deleteByQueryIndex)), + Executor.execute(ElasticRequest.deleteIndex(deleteByQueryIndex)).orDie + ), + test("returns NotFound when provided index is missing") { + checkOnce(genIndexName) { missingIndex => + assertZIO(Executor.execute(ElasticRequest.deleteByQuery(missingIndex, matchAll)))( + equalTo(DeletionOutcome.NotFound) + ) + } + } + ), + suite("bulk query")( + test("successfully execute bulk query") { + checkOnce(genDocumentId, genDocumentId, genDocumentId, genTestDocument) { + (firstDocumentId, secondDocumentId, thirdDocumentId, document) => + for { + _ <- Executor.execute( + ElasticRequest + .create[TestDocument](index, firstDocumentId, document.copy(stringField = "randomIdString")) + ) + _ <- + Executor.execute( + ElasticRequest + .create[TestDocument](index, secondDocumentId, document.copy(stringField = "randomIdString2")) + .refreshTrue + ) + req1 = ElasticRequest.create[TestDocument](index, thirdDocumentId, document) + req2 = ElasticRequest.create[TestDocument](index, document.copy(stringField = "randomIdString3")) + req3 = ElasticRequest.upsert[TestDocument](index, firstDocumentId, document.copy(doubleField = 3000)) + req4 = ElasticRequest.deleteById(index, secondDocumentId) + req5 = ElasticRequest.update[TestDocument](index, thirdDocumentId, document.copy(intField = 100)) + req6 = ElasticRequest.updateByScript( + index, + firstDocumentId, + Script("ctx._source.intField = params['factor']").params("factor" -> 100) + ) + req7 = + ElasticRequest + .update[TestDocument](index, DocumentId("invalid-document-id"), document.copy(intField = 100)) + res <- + Executor.execute(ElasticRequest.bulk(req1, req2, req3, req4, req5, req6, req7).refreshTrue) + doc1 <- Executor.execute(ElasticRequest.getById(index, firstDocumentId)).documentAs[TestDocument] + doc2 <- Executor.execute(ElasticRequest.getById(index, secondDocumentId)).documentAs[TestDocument] + doc3 <- Executor.execute(ElasticRequest.getById(index, thirdDocumentId)).documentAs[TestDocument] + } yield assert(res.items.size)(equalTo(7)) && + assert(res.items.map(_.error.isDefined))( + equalTo(Chunk(false, false, false, false, false, false, true)) + ) && + assert(res.items(6).status)(equalTo(Some(404))) && + assert(res.items(6).error.map(_.`type`))(equalTo(Some("document_missing_exception"))) && + assert(doc3)(isSome(equalTo(document.copy(intField = 100)))) && + assert(doc2)(isNone) && assert(doc1)( + isSome(equalTo(document.copy(doubleField = 3000, intField = 100))) + ) + } + } + ), + suite("updating document")( + test("successfully update document with script") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + val intField = document.intField + val factor = 2 + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + _ <- Executor.execute( + ElasticRequest.updateByScript( + index, + documentId, + Script("ctx._source.intField += params['factor']").params("factor" -> factor) + ) + ) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(document.copy(intField = intField + factor)))) + } + }, + test("successfully create document if it does not exist") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute( + ElasticRequest + .updateByScript( + index, + documentId, + Script("ctx._source.intField += 
params['factor']").params("factor" -> 2) + ) + .orCreate(document) + ) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(document))) + } + }, + test("successfully update document with doc") { + checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) + _ <- Executor.execute(ElasticRequest.update[TestDocument](index, documentId, secondDocument)) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(secondDocument))) + } + } + ), + suite("updating document by query")( + test("successfully update document with only script") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + val stringField = "StringField" + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, document).refreshTrue + ) + updateRes <- + Executor.execute( + ElasticRequest + .updateAllByQuery( + updateByQueryIndex, + Script("ctx._source['stringField'] = params['str']").params("str" -> stringField) + ) + .refreshTrue + ) + doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument] + } yield assert(updateRes)( + equalTo( + UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0) + ) + ) && assert(doc)(isSome(equalTo(document.copy(stringField = stringField)))) + } + }, + test("successfully update document with script and query") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + val newDocument = document.copy(stringField = "StringField") + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, newDocument).refreshTrue + ) + updateRes <- + Executor.execute( + ElasticRequest + .updateByQuery( + index = updateByQueryIndex, + query = term(field = TestDocument.stringField.keyword, value = "StringField"), + script = Script("ctx._source['intField']++") + ) + .refreshTrue + ) + doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument] + } yield assert(updateRes)( + equalTo( + UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0) + ) + ) && assert(doc)(isSome(equalTo(newDocument.copy(intField = newDocument.intField + 1)))) + } + } + ), + suite("geo-distance query")( + test("using geo-distance query") { + checkOnce(genTestDocument) { document => + val indexDefinition = + """ + |{ + | "mappings": { + | "properties": { + | "geoPointField": { + | "type": "geo_point" + | } + | } + | } + |} + |""".stripMargin + + for { + _ <- Executor.execute(ElasticRequest.createIndex(geoDistanceIndex, indexDefinition)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(geoDistanceIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.create[TestDocument](geoDistanceIndex, document).refreshTrue + ) + result <- Executor + .execute( + ElasticRequest.search( + geoDistanceIndex, + ElasticQuery + .geoDistance( + "geoPointField", + GeoPoint(document.geoPointField.lat, document.geoPointField.lon), + Distance(300, Kilometers) + ) + ) + ) + 
.documentAs[TestDocument] + } yield assert(result)(equalTo(Chunk(document))) + } + } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoDistanceIndex)).orDie) + ), + suite("geo-polygon query")( + test("using geo-polygon query") { + checkOnce(genTestDocument) { document => + val indexDefinition = + """ + |{ + | "mappings": { + | "properties": { + | "geoPointField": { + | "type": "geo_point" + | } + | } + | } + |} + |""".stripMargin + + for { + _ <- Executor.execute(ElasticRequest.createIndex(geoPolygonIndex, indexDefinition)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(geoPolygonIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.create[TestDocument](geoPolygonIndex, document).refreshTrue + ) + + r1 <- Executor + .execute( + ElasticRequest.search( + geoPolygonIndex, + ElasticQuery + .geoPolygon("geoPointField", Chunk("0, 0", "0, 90", "90, 90", "90, 0")) + ) + ) + .documentAs[TestDocument] + } yield assert(r1)(equalTo(Chunk(document))) + } + } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoPolygonIndex)).orDie) + ), + suite("search for documents using FunctionScore query")( + test("using randomScore function") { + checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) => + val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue + ) + _ <- Executor.execute( + ElasticRequest + .create[TestDocument]( + firstSearchIndex, + secondDocumentUpdated + ) + .refreshTrue + ) + r1 <- Executor + .execute( + ElasticRequest.search( + firstSearchIndex, + ElasticQuery + .functionScore(randomScoreFunction()) + .query(matches("stringField", firstDocument.stringField)) + ) + ) + .documentAs[TestDocument] + } yield assert(r1)( + hasSameElements(Chunk(firstDocument, secondDocumentUpdated)) + ) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("using randomScore function and weight function") { + checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) => + val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue + ) + _ <- Executor.execute( + ElasticRequest + .create[TestDocument]( + firstSearchIndex, + secondDocumentUpdated + ) + .refreshTrue + ) + r1 <- Executor + .execute( + ElasticRequest.search( + firstSearchIndex, + ElasticQuery + .functionScore( + FunctionScoreFunction.randomScoreFunction(), + FunctionScoreFunction.weightFunction(2) + ) + .query(matches("stringField", firstDocument.stringField)) + .boost(2.0) + .boostMode(FunctionScoreBoostMode.Max) + ) + ) + .documentAs[TestDocument] + } yield assert(r1)( + hasSameElements(Chunk(firstDocument, secondDocumentUpdated)) + ) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) + ) @@ nondeterministic @@ sequential 
@@ prepareElasticsearchIndexForTests @@ afterAll( + Executor.execute(ElasticRequest.deleteIndex(index)).orDie + ) + ).provideShared( + elasticsearchLayer + ) + } +} diff --git a/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala index 2fa580a6f..62fa4dd6c 100644 --- a/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala +++ b/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala @@ -1,113 +1,113 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import sttp.client4.httpclient.zio.HttpClientZioBackend -import zio._ -import zio.elasticsearch.ElasticQuery.matchAll -import zio.elasticsearch.data.GeoPoint -import zio.elasticsearch.domain._ -import zio.elasticsearch.executor.Executor -import zio.elasticsearch.utils.unsafeWrap -import zio.test.Assertion.{containsString, hasMessage} -import zio.test.CheckVariants.CheckN -import zio.test.TestAspect.beforeAll -import zio.test.{Assertion, Gen, TestAspect, ZIOSpecDefault, checkN} - -import java.time.LocalDate - -trait IntegrationSpec extends ZIOSpecDefault { - - val elasticsearchLayer: TaskLayer[Executor] = HttpClientZioBackend.layer() >>> ElasticExecutor.local - - val index: IndexName = IndexName("users") - - val deleteByQueryIndex: IndexName = IndexName("delete-by-query-index") - - val firstSearchIndex: IndexName = IndexName("search-index-1") - - val secondSearchIndex: IndexName = IndexName("search-index-2") - - val createIndexTestName: IndexName = IndexName("create-index-test-name") - - val firstCountIndex: IndexName = IndexName("count-index-1") - - val secondCountIndex: IndexName = IndexName("count-index-2") - - val updateByQueryIndex: IndexName = IndexName("update-by-query-index") - - val geoDistanceIndex: IndexName = IndexName("geo-distance-index") - - val refreshFailIndex: IndexName = IndexName("refresh-fail") - - val IndexPatternAll: IndexPattern = IndexPattern("_all") - - val geoPolygonIndex: IndexName = IndexName("geo-polygon-index") - - val prepareElasticsearchIndexForTests: TestAspect[Nothing, Any, Throwable, Any] = beforeAll((for { - _ <- Executor.execute(ElasticRequest.createIndex(index)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(index, matchAll).refreshTrue) - } yield ()).provide(elasticsearchLayer)) - - def genIndexName: Gen[Any, IndexName] = - Gen.stringBounded(10, 40)(Gen.alphaChar).map(name => unsafeWrap(name.toLowerCase)(IndexName)) - - def genDocumentId: Gen[Any, DocumentId] = - Gen.stringBounded(10, 40)(Gen.alphaNumericChar).map(DocumentId(_)) - - def genGeoPoint: Gen[Any, GeoPoint] = - for { - latitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) - longitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) - } yield GeoPoint(latitude, longitude) - - def genTestDocument: Gen[Any, TestDocument] = for { - stringField <- 
Gen.stringBounded(5, 10)(Gen.alphaChar) - dateField <- Gen.localDate(LocalDate.parse("2010-12-02"), LocalDate.parse("2022-12-05")) - subDocumentList <- Gen.listOfBounded(1, 3)(genTestSubDocument) - intField <- Gen.int(1, 2000) - doubleField <- Gen.double(100, 2000) - booleanField <- Gen.boolean - geoPointField <- genGeoPoint - vectorField <- Gen.listOfN(5)(Gen.int(-10, 10)) - } yield TestDocument( - stringField = stringField, - dateField = dateField, - subDocumentList = subDocumentList, - intField = intField, - doubleField = doubleField, - booleanField = booleanField, - geoPointField = geoPointField, - vectorField = vectorField - ) - - def genTestSubDocument: Gen[Any, TestSubDocument] = for { - stringField1 <- Gen.stringBounded(5, 10)(Gen.alphaChar) - stringField2 <- Gen.stringBounded(5, 10)(Gen.alphaChar) - longField <- Gen.long(1, 75) - intField <- Gen.int(1, 200) - } yield TestSubDocument( - stringField = stringField1, - nestedField = TestNestedField(stringField2, longField), - intField = intField, - intFieldList = Nil - ) - - def checkOnce: CheckN = checkN(1) - - def assertException(substring: String): Assertion[Throwable] = hasMessage(containsString(substring)) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package zio.elasticsearch + +import sttp.client4.httpclient.zio.HttpClientZioBackend +import zio._ +import zio.elasticsearch.ElasticQuery.matchAll +import zio.elasticsearch.data.GeoPoint +import zio.elasticsearch.domain._ +import zio.elasticsearch.executor.Executor +import zio.elasticsearch.utils.unsafeWrap +import zio.test.Assertion.{containsString, hasMessage} +import zio.test.CheckVariants.CheckN +import zio.test.TestAspect.beforeAll +import zio.test.{Assertion, Gen, TestAspect, ZIOSpecDefault, checkN} + +import java.time.LocalDate + +trait IntegrationSpec extends ZIOSpecDefault { + + val elasticsearchLayer: TaskLayer[Executor] = HttpClientZioBackend.layer() >>> ElasticExecutor.local + + val index: IndexName = IndexName("users") + + val deleteByQueryIndex: IndexName = IndexName("delete-by-query-index") + + val firstSearchIndex: IndexName = IndexName("search-index-1") + + val secondSearchIndex: IndexName = IndexName("search-index-2") + + val createIndexTestName: IndexName = IndexName("create-index-test-name") + + val firstCountIndex: IndexName = IndexName("count-index-1") + + val secondCountIndex: IndexName = IndexName("count-index-2") + + val updateByQueryIndex: IndexName = IndexName("update-by-query-index") + + val geoDistanceIndex: IndexName = IndexName("geo-distance-index") + + val refreshFailIndex: IndexName = IndexName("refresh-fail") + + val IndexPatternAll: IndexPattern = IndexPattern("_all") + + val geoPolygonIndex: IndexName = IndexName("geo-polygon-index") + + val prepareElasticsearchIndexForTests: TestAspect[Nothing, Any, Throwable, Any] = beforeAll((for { + _ <- Executor.execute(ElasticRequest.createIndex(index)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(index, matchAll).refreshTrue) + } yield ()).provide(elasticsearchLayer)) + + def genIndexName: Gen[Any, IndexName] = + Gen.stringBounded(10, 40)(Gen.alphaChar).map(name => unsafeWrap(name.toLowerCase)(IndexName)) + + def genDocumentId: Gen[Any, DocumentId] = + Gen.stringBounded(10, 40)(Gen.alphaNumericChar).map(DocumentId(_)) + + def genGeoPoint: Gen[Any, GeoPoint] = + for { + latitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) + longitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) + } yield GeoPoint(latitude, longitude) + + def genTestDocument: Gen[Any, TestDocument] = for { + stringField <- Gen.stringBounded(5, 10)(Gen.alphaChar) + dateField <- Gen.localDate(LocalDate.parse("2010-12-02"), LocalDate.parse("2022-12-05")) + subDocumentList <- Gen.listOfBounded(1, 3)(genTestSubDocument) + intField <- Gen.int(1, 2000) + doubleField <- Gen.double(100, 2000) + booleanField <- Gen.boolean + geoPointField <- genGeoPoint + vectorField <- Gen.listOfN(5)(Gen.int(-10, 10)) + } yield TestDocument( + stringField = stringField, + dateField = dateField, + subDocumentList = subDocumentList, + intField = intField, + doubleField = doubleField, + booleanField = booleanField, + geoPointField = geoPointField, + vectorField = vectorField + ) + + def genTestSubDocument: Gen[Any, TestSubDocument] = for { + stringField1 <- Gen.stringBounded(5, 10)(Gen.alphaChar) + stringField2 <- Gen.stringBounded(5, 10)(Gen.alphaChar) + longField <- Gen.long(1, 75) + intField <- Gen.int(1, 200) + } yield TestSubDocument( + stringField = stringField1, + nestedField = TestNestedField(stringField2, longField), + intField = intField, + intFieldList = Nil + ) + + def checkOnce: CheckN = checkN(1) + + def assertException(substring: String): Assertion[Throwable] = 
hasMessage(containsString(substring)) +} diff --git a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala index e7da783a8..0b52c4978 100644 --- a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala +++ b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala @@ -169,7 +169,7 @@ object ElasticAggregation { * @param field * The field for which the IP range aggregation will be executed * @param ranges - * A chunk of IP range bounds specifying the ranges. + * A chunk of IP range bounds specifying the ranges * @param subAggregations * Optional map of sub-aggregations to nest within this aggregation * @return @@ -180,24 +180,24 @@ object ElasticAggregation { field: Field[_, String], ranges: Chunk[IpRange.IpRangeBound] ): IpRangeAggregation = - IpRange(name = name, field = field.toString, ranges = ranges, keyed = None, subAggregations = Chunk.empty) + IpRange(name = name, field = field.toString, ranges = ranges, keyed = None, subAggregations = None) /** * Constructs an instance of [[zio.elasticsearch.aggregation.IpRangeAggregation]] using the specified parameters. * * @param name - * Aggregation name. + * Aggregation name * @param field - * The field (as string) for which the IP range aggregation will be executed. + * The field (as string) for which the IP range aggregation will be executed * @param ranges - * A chunk of IP range bounds specifying the ranges. + * A chunk of IP range bounds specifying the ranges * @param subAggregations - * Optional map of sub-aggregations to nest within this aggregation. + * Optional map of sub-aggregations to nest within this aggregation * @return * An instance of [[IpRangeAggregation]] configured with the provided parameters. */ def ipRangeAggregation(name: String, field: String, ranges: Chunk[IpRange.IpRangeBound]): IpRangeAggregation = - IpRange(name = name, field = field, ranges = ranges, keyed = None, subAggregations = Chunk.empty) + IpRange(name = name, field = field, ranges = ranges, keyed = None, subAggregations = None) /** * Constructs a type-safe instance of [[zio.elasticsearch.aggregation.MaxAggregation]] using the specified parameters. 
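A minimal usage sketch of the constructors introduced above, for orientation. It exercises only API added in this patch (ipRangeAggregation, IpRange.IpRangeBound, keyed); the field name "ipField" is a hypothetical placeholder for a field mapped as the `ip` type, not part of the patch:

import zio.Chunk
import zio.elasticsearch.ElasticAggregation.ipRangeAggregation
import zio.elasticsearch.aggregation.IpRange.IpRangeBound

// Two adjacent buckets: addresses below 10.0.0.5, and 10.0.0.5 and above.
// A CIDR-style bound is also expressible, e.g. IpRangeBound(mask = Some("10.0.0.0/25")).
val bounds = Chunk(
  IpRangeBound(to = Some("10.0.0.5")),
  IpRangeBound(from = Some("10.0.0.5"))
)

// "ipField" stands in for an ip-typed field in the target index.
val aggregation = ipRangeAggregation("ip_range_agg", "ipField", bounds).keyed(true)

Per the toJson implementation in the following hunk, this serialises as {"ip_range_agg": {"ip_range": {"field": "ipField", "ranges": [{"to": "10.0.0.5"}, {"from": "10.0.0.5"}], "keyed": true}}}; sub-aggregations attached with withSubAgg land under a sibling "aggs" object and, after that hunk, are stored as an Option[Chunk[SingleElasticAggregation]].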
diff --git a/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala b/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala index 89afa7d52..935d6eaea 100644 --- a/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala +++ b/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala @@ -212,23 +212,26 @@ final case class IpRange( field: String, ranges: Chunk[IpRange.IpRangeBound], keyed: Option[Boolean], - subAggregations: Chunk[SingleElasticAggregation] + subAggregations: Option[Chunk[SingleElasticAggregation]] ) extends IpRangeAggregation { self => - def keyed(value: Boolean): IpRangeAggregation = - self.copy(keyed = Some(value)) + def keyed(value: Boolean): IpRangeAggregation = self.copy(keyed = Some(value)) def withAgg(aggregation: SingleElasticAggregation): MultipleAggregations = multipleAggregations.aggregations(self, aggregation) def withSubAgg(aggregation: SingleElasticAggregation): IpRangeAggregation = - self.copy(subAggregations = aggregation +: subAggregations) + self.copy(subAggregations = Some(aggregation +: subAggregations.getOrElse(Chunk.empty))) private[elasticsearch] def toJson: Json = { - val rangesJson = ranges.map(_.toJson) + val rangesJson = ranges.map(_.toJson) val keyedJson = keyed.fold(Obj())(k => Obj("keyed" -> k.toJson)) - val subAggsJson = subAggregations.nonEmptyOrElse(Obj())(sa => Obj("aggs" -> sa.map(_.toJson).reduce(_ merge _))) + val subAggsJson = subAggregations match { + case Some(aggs) if aggs.nonEmpty => + Obj("aggs" -> aggs.map(_.toJson).reduce(_ merge _)) + case _ => Obj() + } Obj( name -> ( diff --git a/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala b/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala index da509a1e2..d01867e50 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala @@ -1,4678 +1,4678 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package zio.elasticsearch - -import zio.Chunk -import zio.elasticsearch.ElasticHighlight.highlight -import zio.elasticsearch.ElasticQuery.{script => _, _} -import zio.elasticsearch.data.GeoPoint -import zio.elasticsearch.domain._ -import zio.elasticsearch.query.DistanceType.Plane -import zio.elasticsearch.query.DistanceUnit.Kilometers -import zio.elasticsearch.query.FunctionScoreFunction._ -import zio.elasticsearch.query.MultiMatchType._ -import zio.elasticsearch.query.MultiValueMode.Max -import zio.elasticsearch.query.ValidationMethod.IgnoreMalformed -import zio.elasticsearch.query._ -import zio.elasticsearch.script.{Painless, Script} -import zio.elasticsearch.utils._ -import zio.test.Assertion.equalTo -import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assert} - -import java.time.LocalDate - -object ElasticQuerySpec extends ZIOSpecDefault { - def spec: Spec[TestEnvironment, Any] = - suite("ElasticQuery")( - suite("constructing")( - suite("bool")( - test("filter") { - val query = filter(matches(TestDocument.stringField, "test"), matches(field = "testField", "test field")) - val queryWithBoost = - filter(matches(TestDocument.stringField, "test"), matches(TestDocument.intField, 22)) - .boost(10.21) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "intField", value = 22) - ), - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk.empty, - boost = Some(10.21), - minimumShouldMatch = None - ) - ) - ) - }, - test("must") { - val query = must(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - must(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22) - ), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = Some(10.21), - minimumShouldMatch = None - ) - ) - ) - }, - test("mustNot") { - val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) - .boost(10.21) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22) - ), - should = Chunk.empty, - boost = Some(10.21), - 
minimumShouldMatch = None - ) - ) - ) - }, - test("should") { - val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) - val queryWithMinimumShouldMatch = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).minimumShouldMatch(2) - val queryWithAllParams = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).boost(3.14).minimumShouldMatch(2) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22) - ), - boost = Some(10.21), - minimumShouldMatch = None - ) - ) - ) && assert(queryWithMinimumShouldMatch)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22), - Exists(field = "booleanField", boost = None) - ), - boost = None, - minimumShouldMatch = Some(2) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22), - Exists(field = "booleanField", boost = None) - ), - boost = Some(3.14), - minimumShouldMatch = Some(2) - ) - ) - ) - }, - test("filter + must + mustNot + should") { - val query1 = - filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) - val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) - .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) - val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) - .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) - .mustNot(matches(TestDocument.intField, 50)) - val queryWithBoost = query1.boost(3.14) - val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) - val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) - - assert(query1)( - equalTo( - Bool[TestDocument]( - filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), - must = Chunk(Match(field = "booleanField", value = true)), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(query2)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk( - Match(field = "doubleField", value = 3.14), - Match(field = "testField", value = true), - Exists(field = "anotherTestField", boost = None) - ), - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(query3)( - equalTo( - Bool[TestDocument]( - filter 
= Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk(Match(field = "intField", value = 50)), - should = Chunk( - Range( - field = "intField", - lower = GreaterThan(1), - upper = LessThanOrEqualTo(100), - boost = None, - format = None - ), - Match(field = "stringField", value = "test") - ), - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), - must = Chunk(Match(field = "booleanField", value = true)), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = Some(3.14), - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithMinimumShouldMatch)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk( - Match(field = "doubleField", value = 3.14), - Match(field = "testField", value = true), - Exists(field = "anotherTestField", boost = None) - ), - should = Chunk.empty, - boost = None, - minimumShouldMatch = Some(2) - ) - ) - ) && - assert(queryWithAllParams)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk(Match(field = "intField", value = 50)), - should = Chunk( - Range( - field = "intField", - lower = GreaterThan(1), - upper = LessThanOrEqualTo(100), - boost = None, - format = None - ), - Match(field = "stringField", value = "test") - ), - boost = Some(3.14), - minimumShouldMatch = Some(3) - ) - ) - ) - } - ), - test("boosting") { - val query = boosting(0.5f, exists("testField"), terms("booleanField", true, false)) - val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) - - assert(query)( - equalTo( - Boosting[Any]( - negativeBoost = 0.5f, - negativeQuery = exists("testField"), - positiveQuery = terms("booleanField", true, false) - ) - ) - ) && assert(queryTs)( - equalTo( - Boosting[TestDocument]( - negativeBoost = 0.5f, - negativeQuery = exists(TestDocument.stringField), - positiveQuery = terms(TestDocument.booleanField, true, false) - ) - ) - ) - }, - test("constantScore") { - val query = constantScore(terms("stringField", "a", "b", "c")) - val queryTs = constantScore(terms(TestDocument.stringField, "a", "b", "c")) - val queryWithBoost = constantScore(terms(TestDocument.stringField, "a", "b", "c")).boost(2.2) - - assert(query)( - equalTo( - ConstantScore[Any]( - Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), - boost = None - ) - ) - ) && - assert(queryTs)( - equalTo( - ConstantScore[TestDocument]( - Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - ConstantScore[TestDocument]( - Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), - boost = Some(2.2) - ) - ) - ) - }, - test("contains") { - val query = contains("testField", "test") - val queryTs = contains(TestDocument.stringField, "test") - val queryWithSuffix = contains(TestDocument.stringField.raw, "test") - val queryWithBoost = contains(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = contains(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(query)( 
- equalTo(Wildcard[Any](field = "testField", value = "*test*", boost = None, caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "*test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Wildcard[TestDocument](field = "stringField.raw", value = "*test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "*test*", - boost = Some(10.21), - caseInsensitive = None - ) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "*test*", - boost = None, - caseInsensitive = Some(true) - ) - ) - ) && - assert(queryAllParams)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "*test*", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - }, - test("disjunctionMax") { - val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) - val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) - val queryWithTieBreaker = disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) - - assert(query)( - equalTo( - DisjunctionMax[Any]( - queries = - Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), - tieBreaker = None - ) - ) - ) && - assert(queryTs)( - equalTo( - DisjunctionMax[TestDocument]( - queries = - Chunk(Exists[Any](field = "stringField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), - tieBreaker = None - ) - ) - ) && - assert(queryWithTieBreaker)( - equalTo( - DisjunctionMax[Any]( - queries = - Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), - tieBreaker = Some(0.5f) - ) - ) - ) - }, - test("exists") { - val query = exists("testField") - val queryTs = exists(TestDocument.intField) - val queryWithBoost = exists(TestDocument.intField).boost(3) - - assert(query)(equalTo(Exists[Any](field = "testField", boost = None))) && - assert(queryTs)(equalTo(Exists[TestDocument](field = "intField", boost = None))) && - assert(queryWithBoost)(equalTo(Exists[TestDocument](field = "intField", boost = Some(3)))) - - }, - test("functionScore") { - val scriptScore = scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")) - val weight = weightFunction(10.0) - val randomScore = randomScoreFunction() - val fieldValue = fieldValueFactor(TestDocument.stringField) - val decay = expDecayFunction("field", origin = "11, 12", scale = "2km") - val typedDecay = expDecayFunction(TestDocument.intField, origin = "11,12", scale = "2km") - - val fullQuery: FunctionScoreQuery[TestDocument] = functionScore(scriptScore, weight, randomScore) - .withFunctions(decay) - .withFunctions(fieldValue) - .boost(2.0) - .boostMode(FunctionScoreBoostMode.Avg) - .maxBoost(42) - .minScore(32) - .query(matches("stringField", "value")) - .scoreMode(FunctionScoreScoreMode.Min) - - val queryWithType: FunctionScoreQuery[TestDocument] = - functionScore(fieldValue).query(matches(TestDocument.stringField, "value")) - val queryTypeShrink: FunctionScoreQuery[TestDocument] = - functionScore(scriptScore).query(matches(TestDocument.stringField, "value")) - val queryWithoutTypeShrink: FunctionScoreQuery[Any] = - functionScore(scriptScore).query(matches("stringField", "value")) - val queryWithNewAnyQuery: FunctionScoreQuery[TestDocument] = - functionScore(fieldValue).query(matches("stringField", "value")) - - 
val anyQueryWithNewTypedFunction = functionScore(scriptScore).withFunctions(fieldValue) - val anyQueryWithNewAnyFunction = functionScore(scriptScore).withFunctions(weight) - val typedQueryWithNewTypedFunction = functionScore(fieldValue).withFunctions(typedDecay) - val typedQueryWithNewAnyFunction = functionScore(fieldValue).withFunctions(weight) - - assert(fullQuery)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk( - scriptScore, - weight, - randomScore, - decay, - fieldValue - ), - boost = Some(2.0), - boostMode = Some(FunctionScoreBoostMode.Avg), - maxBoost = Some(42.0), - minScore = Some(32.0), - query = Some(Match("stringField", "value")), - scoreMode = Some(FunctionScoreScoreMode.Min) - ) - ) - ) && - assert(queryTypeShrink)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(scriptScore), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(queryWithType)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(queryWithoutTypeShrink)( - equalTo( - FunctionScore[Any]( - functionScoreFunctions = Chunk(scriptScore), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(queryWithNewAnyQuery)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(anyQueryWithNewTypedFunction)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(scriptScore, fieldValue), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) && - assert(anyQueryWithNewAnyFunction)( - equalTo( - FunctionScore[Any]( - functionScoreFunctions = Chunk(scriptScore, weight), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) && - assert(typedQueryWithNewTypedFunction)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue, typedDecay), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) && - assert(typedQueryWithNewAnyFunction)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue, weight), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) - }, - test("fuzzy") { - val query = fuzzy("stringField", "test") - val queryTs = fuzzy(TestDocument.stringField, "test") - val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") - val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) - val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) - val queryWithAllParameters = - fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) - val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") - - assert(query)( - equalTo( - Fuzzy[Any]( - field = "stringField", - value = "test", - fuzziness = None, - 
maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryTs)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = None, - maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryWithFuzzinessAuto)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = Some("AUTO"), - maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryWithMaxExpansions)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = None, - maxExpansions = Some(50), - prefixLength = None - ) - ) - ) && - assert(queryWithPrefixLength)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = None, - maxExpansions = None, - prefixLength = Some(3) - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField.raw", - value = "test", - fuzziness = None, - maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryWithAllParameters)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = Some("AUTO"), - maxExpansions = Some(50), - prefixLength = Some(3) - ) - ) - ) - }, - test("geoDistance") { - val queryWithHash = - geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) - val queryWithPoint = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - val queryWithDistanceType = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) - val queryWithName = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") - val queryWithValidationMethod = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - .validationMethod(IgnoreMalformed) - .distanceType(Plane) - .name("name") - - assert(queryWithHash)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "drm3btev3e86", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = None, - validationMethod = None - ) - ) - ) && - assert(queryWithPoint)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryWithDistanceType)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = Some(Plane), - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryWithName)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = Some("name"), - validationMethod = None - ) - ) - ) && assert(queryWithValidationMethod)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = None, - validationMethod = Some(IgnoreMalformed) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = Some(Plane), - queryName = 
Some("name"), - validationMethod = Some(IgnoreMalformed) - ) - ) - ) - }, - test("geoPolygon") { - val query = - geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) - val queryTs = - geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) - val queryWithName = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") - val queryWithValidationMethod = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) - .validationMethod(IgnoreMalformed) - .name("name") - - assert(query)( - equalTo( - GeoPolygon[Any]( - field = "testField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryTs)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("drm3btev3e86", "drm3btev3e87"), - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryWithName)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = Some("name"), - validationMethod = None - ) - ) - ) && assert(queryWithValidationMethod)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = None, - validationMethod = Some(IgnoreMalformed) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = Some("name"), - validationMethod = Some(IgnoreMalformed) - ) - ) - ) - }, - test("hasChild") { - val query = hasChild("child", matchAll) - val queryWithIgnoreUnmapped = hasChild("child", matchAll).ignoreUnmappedTrue - val queryWithInnerHits = hasChild("child", matchAll).innerHits - val queryWithMaxChildren = hasChild("child", matchAll).maxChildren(5) - val queryWithMinChildren = hasChild("child", matchAll).minChildren(1) - val queryWithScoreMode = hasChild("child", matchAll).scoreMode(ScoreMode.Avg) - val queryWithAllParams = hasChild("child", matchAll) - .scoreMode(ScoreMode.Avg) - .ignoreUnmappedTrue - .innerHits - .maxChildren(5) - .minChildren(1) - - assert(query)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = None, - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithIgnoreUnmapped)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = Some(true), - innerHitsField = None, - maxChildren = None, - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithInnerHits)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = Some(InnerHits()), - maxChildren = None, - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithMaxChildren)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = Some(5), - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithMinChildren)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = None, - minChildren = Some(1), - scoreMode = None - ) - ) - ) && assert(queryWithScoreMode)( - equalTo( - HasChild[Any]( - childType = 
"child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = None, - minChildren = None, - scoreMode = Some(ScoreMode.Avg) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = Some(true), - innerHitsField = Some(InnerHits()), - maxChildren = Some(5), - minChildren = Some(1), - scoreMode = Some(ScoreMode.Avg) - ) - ) - ) - }, - test("hasParent") { - val query = hasParent("parent", matchAll) - val queryWithBoost = hasParent("parent", matchAll).boost(3) - val queryWithScoreTrue = hasParent("parent", matchAll).withScoreTrue - val queryWithScoreFalse = hasParent("parent", matchAll).withScoreFalse - val queryWithIgnoreUnmappedTrue = hasParent("parent", matchAll).ignoreUnmappedTrue - val queryWithIgnoreUnmappedFalse = hasParent("parent", matchAll).ignoreUnmappedFalse - val queryWithAllParams = hasParent("parent", matchAll).boost(3).ignoreUnmappedFalse.withScoreTrue - - assert(query)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = None, - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = Some(3.0), - ignoreUnmapped = None, - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithScoreTrue)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = None, - innerHitsField = None, - score = Some(true) - ) - ) - ) && assert(queryWithScoreFalse)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = None, - innerHitsField = None, - score = Some(false) - ) - ) - ) && assert(queryWithIgnoreUnmappedTrue)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = Some(true), - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithIgnoreUnmappedFalse)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = Some(false), - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = Some(3.0), - ignoreUnmapped = Some(false), - innerHitsField = None, - score = Some(true) - ) - ) - ) - }, - test("ids") { - val idsQuery = ids("1", "2", "3") - - assert(idsQuery)( - equalTo( - Ids[Any]( - values = Chunk("1", "2", "3") - ) - ) - ) - }, - test("kNN") { - val queryString = kNN("stringField", 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryBool = kNN("boolField", 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryInt = kNN("intField", 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryStringTs = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryBoolTs = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryIntTs = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryWithSimilarity = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) - - assert(queryString)( - equalTo( - KNN[Any]( - field = "stringField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryBool)( - equalTo( - KNN[Any]( - field = "boolField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryInt)( - equalTo( - KNN[Any]( - field = "intField", - k = 5, - 
numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryStringTs)( - equalTo( - KNN[TestDocument]( - field = "stringField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryBoolTs)( - equalTo( - KNN[TestDocument]( - field = "booleanField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryIntTs)( - equalTo( - KNN[TestDocument]( - field = "intField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryWithSimilarity)( - equalTo( - KNN[TestDocument]( - field = "stringField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = Some(3.14) - ) - ) - ) - }, - test("matchAll") { - val query = matchAll - val queryWithBoost = matchAll.boost(3.14) - - assert(query)(equalTo(MatchAll(boost = None))) && assert(queryWithBoost)( - equalTo(MatchAll(boost = Some(3.14))) - ) - }, - test("matchBooleanPrefix") { - val queryString = matchBooleanPrefix("stringField", "test") - val queryBool = matchBooleanPrefix("booleanField", true) - val queryInt = matchBooleanPrefix("intField", 1) - val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") - val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) - val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) - val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") - val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) - - assert(queryString)( - equalTo(MatchBooleanPrefix[Any, String](field = "stringField", value = "test", minimumShouldMatch = None)) - ) && - assert(queryBool)( - equalTo(MatchBooleanPrefix[Any, Boolean](field = "booleanField", value = true, minimumShouldMatch = None)) - ) && - assert(queryInt)( - equalTo(MatchBooleanPrefix[Any, Int](field = "intField", value = 1, minimumShouldMatch = None)) - ) && - assert(queryStringTs)( - equalTo( - MatchBooleanPrefix[TestDocument, String](field = "stringField", value = "test", minimumShouldMatch = None) - ) - ) && - assert(queryBoolTs)( - equalTo( - MatchBooleanPrefix[TestDocument, Boolean](field = "booleanField", value = true, minimumShouldMatch = None) - ) - ) && - assert(queryIntTs)( - equalTo(MatchBooleanPrefix[TestDocument, Int](field = "intField", value = 1, minimumShouldMatch = None)) - ) && - assert(queryWithSuffix)( - equalTo( - MatchBooleanPrefix[TestDocument, String]( - field = "stringField.raw", - value = "test", - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithMinimumShouldMatch)( - equalTo( - MatchBooleanPrefix[TestDocument, String]( - field = "stringField", - value = "test", - minimumShouldMatch = Some(3) - ) - ) - ) - }, - test("matches") { - val queryString = matches("stringField", "test") - val queryBool = matches("booleanField", true) - val queryInt = matches("intField", 1) - val queryStringTs = matches(TestDocument.stringField, "test") - val queryBoolTs = matches(TestDocument.booleanField, true) - val queryIntTs = matches(TestDocument.intField, 1) - val queryWithSuffix = matches(TestDocument.stringField.raw, "test") - val queryWithBoost = matches(TestDocument.doubleField, 3.14) - - assert(queryString)(equalTo(Match[Any, String](field = "stringField", value = "test"))) && - assert(queryBool)(equalTo(Match[Any, Boolean](field = "booleanField", value = true))) && - 
assert(queryInt)(equalTo(Match[Any, Int](field = "intField", value = 1))) && - assert(queryStringTs)( - equalTo(Match[TestDocument, String](field = "stringField", value = "test")) - ) && - assert(queryBoolTs)( - equalTo(Match[TestDocument, Boolean](field = "booleanField", value = true)) - ) && - assert(queryIntTs)(equalTo(Match[TestDocument, Int](field = "intField", value = 1))) && - assert(queryWithSuffix)( - equalTo(Match[TestDocument, String](field = "stringField.raw", value = "test")) - ) && - assert(queryWithBoost)( - equalTo(Match[TestDocument, Double](field = "doubleField", value = 3.14)) - ) - }, - test("matchPhrase") { - val query = matchPhrase("stringField", "this is a test") - val queryTs = matchPhrase(TestDocument.stringField, "this is a test") - val queryWithSuffix = matchPhrase(TestDocument.stringField.raw, "this is a test") - val queryWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) - - assert(query)(equalTo(MatchPhrase[Any](field = "stringField", value = "this is a test", boost = None))) && - assert(queryTs)( - equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = None)) - ) && - assert(queryWithSuffix)( - equalTo(MatchPhrase[TestDocument](field = "stringField.raw", value = "this is a test", boost = None)) - ) && - assert(queryWithBoost)( - equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = Some(3))) - ) - }, - test("matchPhrasePrefix") { - val query = matchPhrasePrefix("stringField", "test") - val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") - - assert(query)(equalTo(MatchPhrasePrefix[Any](field = "stringField", value = "test"))) && - assert(queryTs)(equalTo(MatchPhrasePrefix[TestDocument](field = "stringField", value = "test"))) - }, - test("multiMatch") { - val query = multiMatch("this is a test") - val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") - val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) - val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) - val queryWithType = multiMatch("this is a test").matchingType(BestFields) - val queryWithBoost = multiMatch("this is a test").boost(2.2) - val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) - val queryWithAllParams = multiMatch("this is a test") - .fields(TestDocument.stringField) - .matchingType(BestFields) - .boost(2.2) - .minimumShouldMatch(2) - - assert(query)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithFields)( - equalTo( - MultiMatch[Any]( - fields = Chunk("stringField1", "stringField2"), - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithFieldsTs)( - equalTo( - MultiMatch[TestDocument]( - fields = Chunk("stringField"), - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithFieldsSuffix)( - equalTo( - MultiMatch[TestDocument]( - fields = Chunk("stringField.raw"), - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithType)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = Some(BestFields), - boost = None, - minimumShouldMatch = None - ) - ) - 
) && - assert(queryWithBoost)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = None, - boost = Some(2.2), - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithMinimumShouldMatch)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = Some(2) - ) - ) - ) && - assert(queryWithAllParams)( - equalTo( - MultiMatch[TestDocument]( - fields = Chunk("stringField"), - value = "this is a test", - matchingType = Some(BestFields), - boost = Some(2.2), - minimumShouldMatch = Some(2) - ) - ) - ) - }, - test("nested") { - val query = nested("testField", matchAll) - val queryTs = nested(TestDocument.subDocumentList, matchAll) - val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue - val queryWithInnerHits = - nested(TestDocument.subDocumentList, matchAll).innerHits(InnerHits().from(0).name("innerHitName").size(3)) - val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits - val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) - val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse - .innerHits(InnerHits().name("innerHitName")) - .scoreMode(ScoreMode.Max) - - assert(query)( - equalTo( - Nested[Any]( - path = "testField", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = None - ) - ) - ) && - assert(queryTs)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = None - ) - ) - ) && - assert(queryWithIgnoreUnmapped)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = Some(true), - innerHitsField = None - ) - ) - ) && - assert(queryWithInnerHits)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = Some( - InnerHits( - excluded = Chunk(), - included = Chunk(), - from = Some(0), - highlights = None, - name = Some("innerHitName"), - size = Some(3) - ) - ) - ) - ) - ) && - assert(queryWithInnerHitsEmpty)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = Some( - InnerHits( - excluded = Chunk(), - included = Chunk(), - from = None, - highlights = None, - name = None, - size = None - ) - ) - ) - ) - ) && - assert(queryWithScoreMode)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = Some(ScoreMode.Avg), - ignoreUnmapped = None, - innerHitsField = None - ) - ) - ) && - assert(queryWithAllParams)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = Some(ScoreMode.Max), - ignoreUnmapped = Some(false), - innerHitsField = Some( - InnerHits( - excluded = Chunk(), - included = Chunk(), - from = None, - highlights = None, - name = Some("innerHitName"), - size = None - ) - ) - ) - ) - ) - }, - test("prefix") { - val query = prefix("stringField", "test") - val queryTs = prefix(TestDocument.stringField, "test") - val queryWithSuffix = prefix(TestDocument.stringField.keyword, "test") - val queryWithCaseInsensitive = prefix(TestDocument.stringField, 
"test").caseInsensitiveTrue - - assert(query)( - equalTo(Prefix[Any](field = "stringField", value = "test", caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo(Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Prefix[TestDocument](field = "stringField.keyword", value = "test", caseInsensitive = None) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = Some(true)) - ) - ) - }, - test("range") { - val query = range("testField") - val queryString = range(TestDocument.stringField) - val queryInt = range(TestDocument.intField) - val queryWithSuffix = range(TestDocument.stringField.suffix("test")) - val queryLowerBound = range(TestDocument.doubleField).gt(3.14) - val queryUpperBound = range(TestDocument.doubleField).lt(10.21) - val queryInclusiveLowerBound = range(TestDocument.intField).gte(10) - val queryInclusiveUpperBound = range(TestDocument.intField).lte(21) - val queryMixedBounds = queryLowerBound.lte(21.0) - val queryWithBoostParam = queryMixedBounds.boost(2.8) - val queryWithFormatParam = range(TestDocument.dateField).gt(LocalDate.of(2023, 5, 11)).format("yyyy-MM-dd") - - assert(query)( - equalTo( - Range[Any, Any, Unbounded.type, Unbounded.type]( - field = "testField", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryString)( - equalTo( - Range[TestDocument, String, Unbounded.type, Unbounded.type]( - field = "stringField", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryInt)( - equalTo( - Range[TestDocument, Int, Unbounded.type, Unbounded.type]( - field = "intField", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Range[TestDocument, String, Unbounded.type, Unbounded.type]( - field = "stringField.test", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryLowerBound)( - equalTo( - Range[TestDocument, Double, GreaterThan[Double], Unbounded.type]( - field = "doubleField", - lower = GreaterThan(3.14), - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryUpperBound)( - equalTo( - Range[TestDocument, Double, Unbounded.type, LessThan[Double]]( - field = "doubleField", - lower = Unbounded, - upper = LessThan(10.21), - boost = None, - format = None - ) - ) - ) && - assert(queryInclusiveLowerBound)( - equalTo( - Range[TestDocument, Int, GreaterThanOrEqualTo[Int], Unbounded.type]( - field = "intField", - lower = GreaterThanOrEqualTo(10), - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryInclusiveUpperBound)( - equalTo( - Range[TestDocument, Int, Unbounded.type, LessThanOrEqualTo[Int]]( - field = "intField", - lower = Unbounded, - upper = LessThanOrEqualTo(21), - boost = None, - format = None - ) - ) - ) && - assert(queryMixedBounds)( - equalTo( - Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]]( - field = "doubleField", - lower = GreaterThan(3.14), - upper = LessThanOrEqualTo(21.0), - boost = None, - format = None - ) - ) - ) && - assert(queryWithBoostParam)( - equalTo( - Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]]( - field = "doubleField", - lower = GreaterThan(3.14), - upper = LessThanOrEqualTo(21), - boost = Some(2.8), - format = None - ) - ) - ) && - 
assert(queryWithFormatParam)( - equalTo( - Range[TestDocument, LocalDate, GreaterThan[LocalDate], Unbounded.type]( - field = "dateField", - lower = GreaterThan(LocalDate.of(2023, 5, 11)), - upper = Unbounded, - boost = None, - format = Some("yyyy-MM-dd") - ) - ) - ) - }, - test("regexp") { - val query = regexp("stringField", "t.*st") - val queryTs = regexp(TestDocument.stringField, "t.*st") - val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue - val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") - - assert(query)(equalTo(Regexp[Any](field = "stringField", value = "t.*st", caseInsensitive = None))) && - assert(queryTs)( - equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = None)) - ) && - assert(queryWithCaseInsensitive)( - equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = Some(true))) - ) && - assert(queryWithSuffix)( - equalTo(Regexp[TestDocument](field = "stringField.raw", value = "t.*st", caseInsensitive = None)) - ) - }, - test("script") { - val query = - ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) - val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) - - assert(query)( - equalTo( - zio.elasticsearch.query.Script( - script = Script( - source = "doc['day_of_week'].value > params['day']", - params = Map("day" -> 2), - lang = Some(Painless) - ), - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - zio.elasticsearch.query.Script( - script = Script( - source = "doc['day_of_week'].value > 2", - params = Map.empty, - lang = None - ), - boost = Some(2.0) - ) - ) - ) - }, - test("startsWith") { - val query = startsWith("testField", "test") - val queryTs = startsWith(TestDocument.stringField, "test") - val queryWithSuffix = startsWith(TestDocument.stringField.raw, "test") - val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = startsWith(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(query)( - equalTo(Wildcard[Any](field = "testField", value = "test*", boost = None, caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Wildcard[TestDocument](field = "stringField.raw", value = "test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "test*", - boost = Some(10.21), - caseInsensitive = None - ) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = Some(true)) - ) - ) && - assert(queryAllParams)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "test*", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - }, - test("term") { - val queryString = term("stringField", "test") - val queryBool = term("booleanField", true) - val queryInt = term("intField", 1) - val queryStringTs = term(TestDocument.stringField, "test") - val queryBoolTs = term(TestDocument.booleanField, true) - val queryIntTs = term(TestDocument.intField, 1) - val queryWithSuffix = term(TestDocument.stringField.keyword, "test") - 
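// Editor's note (illustrative sketch, not from the original patch): the `equalTo` assertions in
// this test imply `Term` is a plain case class with structural equality, so modifier calls return
// new values and chaining order is irrelevant; for instance:
//   term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse ==
//     term(TestDocument.stringField, "test").caseInsensitiveFalse.boost(3.14)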
val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(queryString)( - equalTo(Term[Any, String](field = "stringField", value = "test", boost = None, caseInsensitive = None)) - ) && - assert(queryBool)( - equalTo(Term[Any, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None)) - ) && - assert(queryInt)( - equalTo(Term[Any, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) - ) && - assert(queryStringTs)( - equalTo( - Term[TestDocument, String](field = "stringField", value = "test", boost = None, caseInsensitive = None) - ) - ) && - assert(queryBoolTs)( - equalTo( - Term[TestDocument, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None) - ) - ) && - assert(queryIntTs)( - equalTo(Term[TestDocument, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Term[TestDocument, String]( - field = "stringField.keyword", - value = "test", - boost = None, - caseInsensitive = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - Term[TestDocument, String]( - field = "stringField", - value = "test", - boost = Some(10.21), - caseInsensitive = None - ) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Term[TestDocument, String]( - field = "stringField", - value = "test", - boost = None, - caseInsensitive = Some(true) - ) - ) - ) && - assert(queryAllParams)( - equalTo( - Term[TestDocument, String]( - field = "stringField", - value = "test", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - }, - test("terms") { - val queryString = terms("stringField", "a", "b", "c") - val queryBool = terms("booleanField", true, false) - val queryInt = terms("intField", 1, 2, 3) - val queryStringTs = terms(TestDocument.stringField, "a", "b", "c") - val queryBoolTs = terms(TestDocument.booleanField, true, false) - val queryIntTs = terms(TestDocument.intField, 1, 2, 3) - val queryWithSuffix = terms(TestDocument.stringField.keyword, "a", "b", "c") - val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) - - assert(queryString)( - equalTo(Terms[Any, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) - ) && - assert(queryBool)( - equalTo(Terms[Any, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) - ) && - assert(queryInt)( - equalTo(Terms[Any, Int](field = "intField", values = Chunk(1, 2, 3), boost = None)) - ) && - assert(queryStringTs)( - equalTo(Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) - ) && - assert(queryBoolTs)( - equalTo(Terms[TestDocument, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) - ) && - assert(queryIntTs)( - equalTo(Terms[TestDocument, Int](field = "intField", values = Chunk(1, 2, 3), boost = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Terms[TestDocument, String](field = "stringField.keyword", values = Chunk("a", "b", "c"), boost = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = Some(10.21)) - ) - ) - }, - test("termsSet") { - val queryString = - termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") - 
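// Editor's note: unlike `terms`, `termsSet` makes the number of terms that must match data-driven;
// here it is read per document from the "required_matches" field, while `termsSetScript` (tested
// below) computes it with a script instead. A sketch of the two variants, taken from this spec:
//   termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3)
//   termsSetScript(field = "intField", minimumShouldMatchScript = Script("doc['intField'].value"), terms = 1, 2, 3)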
val queryBool = - termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) - val queryInt = termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3) - val queryStringTs = termsSet( - field = TestDocument.stringField, - minimumShouldMatchField = TestDocument.stringField, - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSet( - field = TestDocument.booleanField, - minimumShouldMatchField = TestDocument.booleanField, - terms = true, - false - ) - val queryIntTs = - termsSet(field = TestDocument.intField, minimumShouldMatchField = TestDocument.intField, terms = 1, 2, 3) - val queryWithSuffix = - termsSet( - field = TestDocument.stringField.keyword, - minimumShouldMatchField = TestDocument.stringField, - terms = "a", - "b", - "c" - ) - val queryWithBoost = - termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) - - assert(queryString)( - equalTo( - TermsSet[Any, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryBool)( - equalTo( - TermsSet[Any, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryInt)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryStringTs)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = Some("stringField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryBoolTs)( - equalTo( - TermsSet[TestDocument, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = Some("booleanField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryIntTs)( - equalTo( - TermsSet[TestDocument, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = Some("intField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField.keyword", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = Some("stringField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = Some(10.0) - ) - ) - ) - }, - test("termsSetScript") { - val queryString = termsSetScript( - field = "stringField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBool = termsSetScript( - field = "booleanField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryInt = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3 - ) - val queryStringTs = termsSetScript( - field = TestDocument.stringField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSetScript( - field = TestDocument.booleanField, - 
minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryIntTs = termsSetScript( - field = TestDocument.intField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3 - ) - val queryWithSuffix = - termsSetScript( - field = TestDocument.stringField.keyword, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryWithBoost = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3 - ).boost(10.0) - - assert(queryString)( - equalTo( - TermsSet[Any, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryBool)( - equalTo( - TermsSet[Any, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryInt)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryStringTs)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryBoolTs)( - equalTo( - TermsSet[TestDocument, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryIntTs)( - equalTo( - TermsSet[TestDocument, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField.keyword", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = Some(10.0) - ) - ) - ) - }, - test("wildcard") { - val query = wildcard("testField", "test") - val queryTs = wildcard(TestDocument.stringField, "test") - val queryWithSuffix = wildcard(TestDocument.stringField.raw, "test") - val queryWithBoost = wildcard(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = wildcard(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(query)( - equalTo(Wildcard[Any](field = "testField", value = "test", boost = None, caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo(Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Wildcard[TestDocument](field = "stringField.raw", value = "test", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Wildcard[TestDocument](field = 
"stringField", value = "test", boost = Some(10.21), caseInsensitive = None) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = Some(true)) - ) - ) && - assert(queryAllParams)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "test", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - } - ), - suite("encoding as JSON")( - suite("bool")( - test("filter") { - val query = filter(matches(TestDocument.doubleField, 39.2)) - val queryWithBoost = filter(matches(TestDocument.booleanField, true)).boost(3.14) - - val expected = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match": { - | "doubleField": 39.2 - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match": { - | "booleanField": true - | } - | } - | ], - | "boost": 3.14 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("must") { - val query = must(matchPhrase(TestDocument.stringField, "test")) - val queryWithBoost = must(terms(TestDocument.stringField, "a", "b", "c")).boost(3.14) - - val expected = - """ - |{ - | "bool": { - | "must": [ - | { - | "match_phrase": { - | "stringField": "test" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "boost": 3.14 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("mustNot") { - val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) - .boost(10.21) - - val expected = - """ - |{ - | "bool": { - | "must_not": [ - | { - | "match": { - | "stringField": "test" - | } - | }, - | { - | "match": { - | "testField": "test field" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "must_not": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | } - | ], - | "boost": 10.21 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("should") { - val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) - val queryWithMinimumShouldMatch = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).minimumShouldMatch(2) - val queryWithAllParams = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).boost(3.14).minimumShouldMatch(2) - - val expected = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField": "test" - | } - | }, - | { - | "match": { - | "testField": "test field" - | } - | } - | ] - | } 
- |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | } - | ], - | "boost": 10.21 - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | }, - | { - | "exists": { - | "field": "booleanField" - | } - | } - | ], - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | }, - | { - | "exists": { - | "field": "booleanField" - | } - | } - | ], - | "boost": 3.14, - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("filter + must + mustNot + should") { - val query1 = - filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) - val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) - .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) - val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) - .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) - .mustNot(matches(TestDocument.intField, 50)) - val queryWithBoost = query1.boost(3.14) - val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) - val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) - - val expected1 = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match_phrase": { - | "stringField": "test" - | } - | } - | ], - | "must": [ - | { - | "match": { - | "booleanField": true - | } - | } - | ] - | } - |} - |""".stripMargin - - val expected2 = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "doubleField": 3.14 - | } - | }, - | { - | "match": { - | "testField": true - | } - | }, - | { - | "exists": { - | "field": "anotherTestField" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expected3 = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "intField": 50 - | } - | } - | ], - | "should": [ - | { - | "range": { - | "intField": { - | "gt": 1, - | "lte": 100 - | } - | } - | }, - | { - | "match": { - | "stringField": "test" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match_phrase": { - | "stringField": "test" - | } - | } - | ], - | "must": [ - | { - | "match": { - | "booleanField": true - | } - | } - | ], - | "boost": 3.14 - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "doubleField": 3.14 - 
| } - | }, - | { - | "match": { - | "testField": true - | } - | }, - | { - | "exists": { - | "field": "anotherTestField" - | } - | } - | ], - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "intField": 50 - | } - | } - | ], - | "should": [ - | { - | "range": { - | "intField": { - | "gt": 1, - | "lte": 100 - | } - | } - | }, - | { - | "match": { - | "stringField": "test" - | } - | } - | ], - | "boost": 3.14, - | "minimum_should_match": 3 - | } - |} - |""".stripMargin - - assert(query1.toJson(fieldPath = None))(equalTo(expected1.toJson)) && - assert(query2.toJson(fieldPath = None))(equalTo(expected2.toJson)) && - assert(query3.toJson(fieldPath = None))(equalTo(expected3.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - } - ), - test("boosting") { - val query = boosting(0.5f, exists("stringField"), terms("booleanField", true, false)) - val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) - - val expected = - """ - |{ - | "boosting": { - | "positive": { - | "terms": { - | "booleanField": [ true, false ] - | } - | }, - | "negative": { - | "exists": { - | "field": "stringField" - | } - | }, - | "negative_boost": 0.5 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("constantScore") { - val query = constantScore(matchPhrase("stringField", "test")) - val queryTs = constantScore(matchPhrase(TestDocument.stringField, "test")) - val queryWithBoost = constantScore(matchPhrase(TestDocument.stringField, "test")).boost(1.5) - - val expected = - """ - |{ - | "constant_score": { - | "filter": { - | "match_phrase": { - | "stringField": "test" - | } - | } - | } - |} - |""".stripMargin - val expectedWithBoost = - """ - |{ - | "constant_score": { - | "filter": { - | "match_phrase": { - | "stringField": "test" - | } - | }, - | "boost": 1.5 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("contains") { - val query = contains(TestDocument.stringField, "test") - val queryWithBoost = contains(TestDocument.stringField, "test").boost(3.14) - val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue - val queryWithAllParams = contains(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse - - val expected = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "*test*" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "*test*", - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "*test*", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "wildcard": { - | 
"stringField": { - | "value": "*test*", - | "boost": 39.2, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("disjunctionMax") { - val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) - val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) - val queryWithTieBreaker = - disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) - - val expected = - """ - |{ - | "dis_max": { - | "queries": [ - | { "exists": { "field": "existsField" } }, - | { "ids": { "values": ["1", "2", "3"] } } - | ] - | } - |} - |""".stripMargin - - val expectedTs = - """ - |{ - | "dis_max": { - | "queries": [ - | { "exists": { "field": "stringField" } }, - | { "ids": { "values": ["1", "2", "3"] } } - | ] - | } - |} - |""".stripMargin - - val expectedWithTieBreaker = - """ - |{ - | "dis_max": { - | "queries": [ - | { "exists": { "field": "existsField" } }, - | { "ids": { "values": ["1", "2", "3"] } } - | ], - | "tie_breaker": 0.5 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && - assert(queryWithTieBreaker.toJson(fieldPath = None))(equalTo(expectedWithTieBreaker.toJson)) - }, - test("exists") { - val query = exists("testField") - val queryTs = exists(TestDocument.dateField) - val queryTsWithBoost = exists(TestDocument.dateField).boost(3) - - val expected = - """ - |{ - | "exists": { - | "field": "testField" - | } - |} - |""".stripMargin - - val expectedTs = - """ - |{ - | "exists": { - | "field": "dateField" - | } - |} - |""".stripMargin - - val expectedTsWithBoost = - """ - |{ - | "exists": { - | "field": "dateField", - | "boost": 3.0 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && - assert(queryTsWithBoost.toJson(fieldPath = None))(equalTo(expectedTsWithBoost.toJson)) - }, - test("functionScore") { - val query = functionScore( - scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")), - randomScoreFunction().weight(2.0), - expDecayFunction("field", origin = "2013-09-17", scale = "10d") - .offset("5d") - .multiValueMode(Max) - .weight(10.0) - ) - .boost(2.0) - .boostMode(FunctionScoreBoostMode.Avg) - .maxBoost(42) - .minScore(32) - .query(matches("stringField", "string")) - .scoreMode(FunctionScoreScoreMode.Min) - - val expected = - """ - |{ - | "function_score": { - | "query" : { "match": { "stringField" : "string" } }, - | "score_mode": "min", - | "boost": 2.0, - | "boost_mode": "avg", - | "max_boost": 42.0, - | "min_score": 32.0, - | "functions": [ - | { - | "script_score": { - | "script": { - | "source": "params.agg1 + params.agg2 > 10" - | } - | } - | }, - | { - | "random_score": {}, - | "weight": 2.0 - | }, - | { - | "exp": { - | "field": { - | "origin": "2013-09-17", - | "scale": "10d", - | "offset": "5d" - | }, - | "multi_value_mode": "max" - | }, - | "weight": 10.0 - | } - | ] - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("fuzzy") { - val query = 
fuzzy("stringField", "test") - val queryTs = fuzzy(TestDocument.stringField, "test") - val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") - val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) - val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) - val queryWithAllParameters = - fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) - val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") - - val expected = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - val expectedWithFuzzinessAuto = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "fuzziness": "AUTO" - | } - | } - |} - |""".stripMargin - - val expectedWithMaxExpansions = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "max_expansions": 50 - | } - | } - |} - |""".stripMargin - - val expectedWithPrefixLength = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "prefix_length": 3 - | } - | } - |} - |""".stripMargin - - val expectedWithAllParameters = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "fuzziness": "AUTO", - | "max_expansions": 50, - | "prefix_length": 3 - | } - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "fuzzy": { - | "stringField.raw": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithFuzzinessAuto.toJson(fieldPath = None))(equalTo(expectedWithFuzzinessAuto.toJson)) && - assert(queryWithMaxExpansions.toJson(fieldPath = None))(equalTo(expectedWithMaxExpansions.toJson)) && - assert(queryWithPrefixLength.toJson(fieldPath = None))(equalTo(expectedWithPrefixLength.toJson)) && - assert(queryWithAllParameters.toJson(fieldPath = None))(equalTo(expectedWithAllParameters.toJson)) && - assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) - }, - test("geoDistance") { - val queryWithHash = - geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) - val queryWithPoint = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - val queryWithDistanceType = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) - val queryWithName = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") - val queryWithValidationMethod = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - .validationMethod(IgnoreMalformed) - .distanceType(Plane) - .name("name") - - val expectedWithHash = - """ - |{ - | "geo_distance": { - | "geoPointField": "drm3btev3e86", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val expectedWithDistance = - """ - |{ - | "geo_distance": { - | "distance": "200.0km", - | "geoPointField": "20.0,21.1" - | } - |} - |""".stripMargin - - val expectedWithDistanceType = - """ - |{ - | "geo_distance": { - | "distance_type" : "plane", - | "geoPointField": "20.0,21.1", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val 
expectedWithName = - """ - |{ - | "geo_distance": { - | "_name": "name", - | "geoPointField": "20.0,21.1", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val expectedWithValidationMethod = - """ - |{ - | "geo_distance": { - | "validation_method": "IGNORE_MALFORMED", - | "geoPointField": "20.0,21.1", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "geo_distance": { - | "validation_method": "IGNORE_MALFORMED", - | "distance_type" : "plane", - | "_name": "name", - | "distance": "200.0km", - | "geoPointField": "20.0,21.1" - | } - |} - |""".stripMargin - - assert(queryWithHash.toJson(fieldPath = None))(equalTo(expectedWithHash.toJson)) && - assert(queryWithPoint.toJson(fieldPath = None))(equalTo(expectedWithDistance.toJson)) && - assert(queryWithDistanceType.toJson(fieldPath = None))(equalTo(expectedWithDistanceType.toJson)) && - assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && - assert(queryWithValidationMethod.toJson(fieldPath = None))(equalTo(expectedWithValidationMethod.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("geoPolygon") { - val query = - geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) - val queryTs = - geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) - val queryWithName = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") - val queryWithValidationMethod = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) - .validationMethod(IgnoreMalformed) - .name("name") - - val expected = - """ - |{ - | "geo_polygon": { - | "testField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - val expectedTs = - """ - |{ - | "geo_polygon": { - | "stringField": { - | "points": ["drm3btev3e86", "drm3btev3e87"] - | } - | } - |} - |""".stripMargin - - val expectedWithName = - """ - |{ - | "geo_polygon": { - | "_name": "name", - | "stringField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - val expectedWithValidationMethod = - """ - |{ - | "geo_polygon": { - | "validation_method": "IGNORE_MALFORMED", - | "stringField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "geo_polygon": { - | "validation_method": "IGNORE_MALFORMED", - | "_name": "name", - | "stringField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && - assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && - assert(queryWithValidationMethod.toJson(fieldPath = None))(equalTo(expectedWithValidationMethod.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("hasChild") { - val query = hasChild("child", matches(TestDocument.stringField, "test")) - val queryWithIgnoreUnmapped = hasChild("child", matches("field", "value")).ignoreUnmappedTrue - val queryWithInnerHits = hasChild("child", matches("field", "value")).innerHits - val queryWithMaxChildren = hasChild("child", matches("field", 
"value")).maxChildren(5) - val queryWithMinChildren = hasChild("child", matches("field", "value")).minChildren(1) - val queryWithScoreMode = hasChild("child", matches("field", "value")).scoreMode(ScoreMode.Avg) - val queryWithAllParams = hasChild("child", matches("field", "value")) - .scoreMode(ScoreMode.Avg) - .ignoreUnmappedTrue - .innerHits - .maxChildren(5) - .minChildren(1) - - val expected = - """ - |{ - | "has_child": { - | "type": "child", - | "query": { - | "match": { - | "stringField" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithIgnoreUnmapped = - """ - |{ - | "has_child": { - | "type": "child", - | "ignore_unmapped": true, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithInnerHits = - """ - |{ - | "has_child": { - | "type": "child", - | "inner_hits": {}, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithMaxChildren = - """ - |{ - | "has_child": { - | "type": "child", - | "max_children": 5, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithMinChildren = - """ - |{ - | "has_child": { - | "type": "child", - | "min_children": 1, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithScoreMode = - """ - |{ - | "has_child": { - | "type": "child", - | "score_mode": "avg", - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "has_child": { - | "type": "child", - | "score_mode": "avg", - | "ignore_unmapped": true, - | "inner_hits": {}, - | "max_children": 5, - | "min_children": 1, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && - assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && - assert(queryWithMaxChildren.toJson(fieldPath = None))(equalTo(expectedWithMaxChildren.toJson)) && - assert(queryWithMinChildren.toJson(fieldPath = None))(equalTo(expectedWithMinChildren.toJson)) && - assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("hasParent") { - val query = hasParent("parent", matches(TestDocument.stringField, "test")) - val queryWithBoost = hasParent("parent", matches(TestDocument.stringField, "test")).boost(3) - val queryWithScore = hasParent("parent", matches("field", "test")).withScoreFalse - val queryWithIgnoreUnmapped = hasParent("parent", matches("field", "test")).ignoreUnmappedFalse - val queryWithScoreAndIgnoreUnmapped = - hasParent("parent", matches("field", "test")).withScoreTrue.ignoreUnmappedTrue - val queryWithInnerHits = hasParent("parent", matches("field", "test")).innerHits - val queryWithAllParams = hasParent("parent", matches(TestDocument.stringField, "test")) - .boost(3) - .withScoreFalse - .ignoreUnmappedFalse - .innerHits - val expected = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | "match": { - | "stringField" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | 
"match": { - | "stringField" : "test" - | } - | }, - | "boost": 3.0 - | } - |} - |""".stripMargin - - val expectedWithScore = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "score": false, - | "query": { - | "match": { - | "field" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithIgnoreUnmapped = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "ignore_unmapped": false, - | "query": { - | "match": { - | "field" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithScoreAndIgnoreUnmapped = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "score": true, - | "ignore_unmapped": true, - | "query": { - | "match": { - | "field" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithInnerHits = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | "match": { - | "field" : "test" - | } - | }, - | "inner_hits": {} - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | "match": { - | "stringField" : "test" - | } - | }, - | "boost": 3.0, - | "ignore_unmapped": false, - | "score": false, - | "inner_hits": {} - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithScore.toJson(fieldPath = None))(equalTo(expectedWithScore.toJson)) && - assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && - assert(queryWithScoreAndIgnoreUnmapped.toJson(fieldPath = None))( - equalTo(expectedWithScoreAndIgnoreUnmapped.toJson) - ) && - assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("ids") { - val query = ids("1", "2", "3") - - val expected = - """ - |{ - | "ids": { - | "values": ["1", "2", "3"] - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("kNN") { - val queryString = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryBool = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryInt = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryWithSimilarity = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) - - val expectedString = - """ - |{ - | "field": "stringField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10 - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "field": "booleanField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10 - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "field": "intField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10 - |} - |""".stripMargin - - val expectedWithSimilarity = - """ - |{ - | "field": "stringField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10, - | "similarity": 3.14 - |} - |""".stripMargin - - assert(queryString.toJson)(equalTo(expectedString.toJson)) && - assert(queryBool.toJson)(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson)(equalTo(expectedInt.toJson)) && - assert(queryWithSimilarity.toJson)(equalTo(expectedWithSimilarity.toJson)) - }, - test("matchAll") { - val query = matchAll - val queryWithBoost = matchAll.boost(3.14) - - val expected = - 
""" - |{ - | "match_all": {} - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "match_all": { - | "boost": 3.14 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("matchBooleanPrefix") { - val queryString = matchBooleanPrefix("stringField", "test") - val queryBool = matchBooleanPrefix("booleanField", true) - val queryInt = matchBooleanPrefix("intField", 1) - val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") - val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) - val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) - val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") - val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) - - val expectedString = - """ - |{ - | "match_bool_prefix": { - | "stringField": "test" - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "match_bool_prefix": { - | "booleanField": true - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "match_bool_prefix": { - | "intField": 1 - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "match_bool_prefix": { - | "stringField.raw": "test" - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "match_bool_prefix": { - | "stringField": { - | "query": "test", - | "minimum_should_match": 3 - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))(equalTo(expectedWithMinimumShouldMatch.toJson)) - }, - test("matches") { - val query = matches("testField", true) - val queryTsInt = matches(TestDocument.intField, 39) - val queryTsString = matches(TestDocument.stringField, "test") - - val expected = - """ - |{ - | "match": { - | "testField": true - | } - |} - |""".stripMargin - - val expectedTsInt = - """ - |{ - | "match": { - | "intField": 39 - | } - |} - |""".stripMargin - - val expectedTsString = - """ - |{ - | "match": { - | "stringField": "test" - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTsInt.toJson(fieldPath = None))(equalTo(expectedTsInt.toJson)) && - assert(queryTsString.toJson(fieldPath = None))(equalTo(expectedTsString.toJson)) - }, - test("matchPhrase") { - val querySimple = matchPhrase("stringField", "this is a test") - val queryRaw = matchPhrase("stringField.raw", "this is a test") - val querySimpleTs = matchPhrase(TestDocument.stringField, "this is a test") - val queryRawTs = matchPhrase(TestDocument.stringField.raw, "this is a test") - val querySimpleTsWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) - - val expectedSimple = - """ - 
|{ - | "match_phrase": { - | "stringField": "this is a test" - | } - |} - |""".stripMargin - - val expectedRaw = - """ - |{ - | "match_phrase": { - | "stringField.raw": "this is a test" - | } - |} - |""".stripMargin - - val expectedSimpleTsWithBoost = - """ - |{ - | "match_phrase": { - | "stringField": { - | "query": "this is a test", - | "boost": 3.0 - | } - | } - |} - |""".stripMargin - - assert(querySimple.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && - assert(querySimpleTs.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && - assert(queryRaw.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && - assert(queryRawTs.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && - assert(querySimpleTsWithBoost.toJson(fieldPath = None))(equalTo(expectedSimpleTsWithBoost.toJson)) - }, - test("matchPhrasePrefix") { - val query = matchPhrasePrefix("stringField", "test") - val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") - - val expected = - """ - |{ - | "match_phrase_prefix": { - | "stringField": { - | "query" : "test" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("multiMatch") { - val query = multiMatch("this is a test") - val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") - val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) - val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) - val queryWithType = multiMatch("this is a test").matchingType(BestFields) - val queryWithBoost = multiMatch("this is a test").boost(2.2) - val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) - val queryWithAllParams = multiMatch("this is a test") - .fields(TestDocument.stringField) - .matchingType(BestFields) - .boost(2.2) - .minimumShouldMatch(2) - - val expected = - """ - |{ - | "multi_match": { - | "query": "this is a test" - | } - |} - |""".stripMargin - - val expectedWithFields = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "fields": [ "stringField1", "stringField2" ] - | } - |} - |""".stripMargin - - val expectedWithFieldsTs = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "fields": [ "stringField" ] - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "fields": [ "stringField.raw" ] - | } - |} - |""".stripMargin - - val expectedWithType = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "type": "best_fields" - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "boost": 2.2 - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "type": "best_fields", - | "fields": [ "stringField" ], - | "boost": 2.2, - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithFields.toJson(fieldPath = None))(equalTo(expectedWithFields.toJson)) && - assert(queryWithFieldsTs.toJson(fieldPath = None))(equalTo(expectedWithFieldsTs.toJson)) && - 
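// Editor's note: parameters left as None are omitted from the rendered JSON entirely, which is why
// `expected` above contains only the "query" key while each modifier contributes exactly one extra
// key (e.g. "boost": 2.2, "minimum_should_match": 2).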
assert(queryWithFieldsSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && - assert(queryWithType.toJson(fieldPath = None))(equalTo(expectedWithType.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("nested") { - val query = nested(TestDocument.subDocumentList, matchAll) - val queryWithNested = nested(TestDocument.subDocumentList, nested("items", term("testField", "test"))) - val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue - val queryWithInnerHits = - nested(TestDocument.subDocumentList, matchAll).innerHits( - InnerHits() - .from(0) - .size(3) - .name("innerHitName") - .highlights(highlight("stringField")) - .excludes("longField") - .includes("intField") - ) - val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits - val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) - val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse - .innerHits(InnerHits().from(10).size(20).name("innerHitName")) - .scoreMode(ScoreMode.Min) - - val expected = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | } - | } - |} - |""".stripMargin - - val expectedWithNested = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "nested": { - | "path": "subDocumentList.items", - | "query": { - | "term": { - | "subDocumentList.items.testField": { - | "value": "test" - | } - | } - | } - | } - | } - | } - |} - |""".stripMargin - - val expectedWithIgnoreUnmapped = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "ignore_unmapped": true - | } - |} - |""".stripMargin - - val expectedWithInnerHits = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "inner_hits": { - | "from": 0, - | "size": 3, - | "name": "innerHitName", - | "highlight" : { - | "fields" : { - | "subDocumentList.stringField" : {} - | } - | }, - | "_source" : { - | "includes" : [ - | "intField" - | ], - | "excludes" : [ - | "longField" - | ] - | } - | } - | } - |} - |""".stripMargin - - val expectedWithInnerHitsEmpty = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "inner_hits": {} - | } - |} - |""".stripMargin - - val expectedWithScoreMode = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "score_mode": "avg" - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "ignore_unmapped": false, - | "score_mode": "min", - | "inner_hits": { - | "from": 10, - | "size": 20, - | "name": "innerHitName" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithNested.toJson(fieldPath = None))(equalTo(expectedWithNested.toJson)) && - assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && - assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && - 
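// Editor's note: `expectedWithNested` above shows that nesting prefixes the outer path onto the
// inner query, so the inner `term("testField", "test")` is encoded against
// "subDocumentList.items.testField"; presumably the `fieldPath` argument of `toJson` threads that
// prefix through, e.g. nested("a", nested("b", term("f", "v"))) would target "a.b.f"
// (editor's inference).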
assert(queryWithInnerHitsEmpty.toJson(fieldPath = None))(equalTo(expectedWithInnerHitsEmpty.toJson)) && - assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("prefix") { - val query = prefix(TestDocument.stringField, "test") - val queryWithCaseInsensitive = prefix(TestDocument.stringField, "test").caseInsensitiveTrue - - val expected = - """ - |{ - | "prefix": { - | "stringField": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "prefix": { - | "stringField": { - | "value": "test", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) - }, - test("range") { - val queryEmpty = range(TestDocument.intField) - val queryEmptyWithBoost = range(TestDocument.intField).boost(3.14) - val queryLowerBound = range(TestDocument.intField).gt(23) - val queryUpperBound = range(TestDocument.intField).lt(45) - val queryInclusiveLowerBound = range(TestDocument.intField).gte(23) - val queryInclusiveUpperBound = range(TestDocument.intField).lte(45) - val queryMixedBounds = range(TestDocument.intField).gt(10).lte(99) - val queryMixedBoundsWithBoost = range(TestDocument.intField).gt(10).lte(99).boost(3.14) - val queryWithFormat = range(TestDocument.dateField).gt(LocalDate.of(2020, 1, 10)).format("yyyy-MM-dd") - - val expectedEmpty = - """ - |{ - | "range": { - | "intField": { - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "range": { - | "intField": { - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedLowerBound = - """ - |{ - | "range": { - | "intField": { - | "gt": 23 - | } - | } - |} - |""".stripMargin - - val expectedUpperBound = - """ - |{ - | "range": { - | "intField": { - | "lt": 45 - | } - | } - |} - |""".stripMargin - - val expectedInclusiveLowerBound = - """ - |{ - | "range": { - | "intField": { - | "gte": 23 - | } - | } - |} - |""".stripMargin - - val expectedInclusiveUpperBound = - """ - |{ - | "range": { - | "intField": { - | "lte": 45 - | } - | } - |} - |""".stripMargin - - val expectedMixedBounds = - """ - |{ - | "range": { - | "intField": { - | "gt": 10, - | "lte": 99 - | } - | } - |} - |""".stripMargin - - val expectedMixedBoundsWithBoost = - """ - |{ - | "range": { - | "intField": { - | "gt": 10, - | "lte": 99, - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithFormat = - """ - |{ - | "range": { - | "dateField": { - | "gt": "2020-01-10", - | "format": "yyyy-MM-dd" - | } - | } - |} - |""".stripMargin - - assert(queryEmpty.toJson(fieldPath = None))(equalTo(expectedEmpty.toJson)) && - assert(queryEmptyWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryLowerBound.toJson(fieldPath = None))(equalTo(expectedLowerBound.toJson)) && - assert(queryUpperBound.toJson(fieldPath = None))(equalTo(expectedUpperBound.toJson)) && - assert(queryInclusiveLowerBound.toJson(fieldPath = None))(equalTo(expectedInclusiveLowerBound.toJson)) && - assert(queryInclusiveUpperBound.toJson(fieldPath = None))(equalTo(expectedInclusiveUpperBound.toJson)) && - assert(queryMixedBounds.toJson(fieldPath = None))(equalTo(expectedMixedBounds.toJson)) && - assert(queryMixedBoundsWithBoost.toJson(fieldPath = 
None))(equalTo(expectedMixedBoundsWithBoost.toJson)) && - assert(queryWithFormat.toJson(fieldPath = None))(equalTo(expectedWithFormat.toJson)) - }, - test("regexp") { - val query = regexp("stringField", "t.*st") - val queryTs = regexp(TestDocument.stringField, "t.*st") - val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue - val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") - - val expected = - """ - |{ - | "regexp": { - | "stringField": { - | "value": "t.*st" - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "regexp": { - | "stringField": { - | "value": "t.*st", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "regexp": { - | "stringField.raw": { - | "value": "t.*st" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) - }, - test("script") { - val query = - ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) - val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) - - val expected = - """ - |{ - | "script": { - | "script": { - | "lang": "painless", - | "source": "doc['day_of_week'].value > params['day']", - | "params": { - | "day": 2 - | } - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "script": { - | "script": { - | "source": "doc['day_of_week'].value > 2" - | }, - | "boost": 2.0 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("startsWith") { - val query = startsWith(TestDocument.stringField, "test") - val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(3.14) - val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue - val queryWithAllParams = startsWith(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse - - val expected = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*", - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*", - | "boost": 39.2, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("term") { - val queryString = term(TestDocument.stringField, "test") - val queryBool = term(TestDocument.booleanField, true) - val queryInt = term(TestDocument.intField, 21) - 
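(A compact sketch of the script-query construction covered here, assuming the spec's own `Script` and `Painless` imports; `scripted` is an illustrative name.)

import zio.elasticsearch.ElasticQuery
import zio.elasticsearch.script.{Painless, Script}

// A painless script with one bound parameter; boost chains on the result,
// and the query serialises with "lang", "source" and "params" as asserted.
val scripted = ElasticQuery
  .script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless))
  .boost(2.0)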
val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue - val queryWithAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - val expectedString = - """ - |{ - | "term": { - | "stringField": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "term": { - | "booleanField": { - | "value": true - | } - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "term": { - | "intField": { - | "value": 21 - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "term": { - | "stringField": { - | "value": "test", - | "boost": 10.21 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "term": { - | "stringField": { - | "value": "test", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "term": { - | "stringField": { - | "value": "test", - | "boost": 3.14, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("terms") { - val queryString = terms(TestDocument.stringField, "a", "b", "c") - val queryBool = terms(TestDocument.booleanField, true, false) - val queryInt = terms(TestDocument.intField, 1, 2, 3, 4) - val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) - - val expectedString = - """ - |{ - | "terms": { - | "stringField": [ "a", "b", "c" ] - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "terms": { - | "booleanField": [ true, false ] - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "terms": { - | "intField": [ 1, 2, 3, 4 ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "terms": { - | "stringField": [ "a", "b", "c" ], - | "boost": 10.21 - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("termsSet") { - val queryString = - termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") - val queryBool = - termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) - val queryInt = termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3) - val queryStringTs = termsSet( - field = TestDocument.stringField, - minimumShouldMatchField = TestDocument.stringField, - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSet( - field = TestDocument.booleanField, - minimumShouldMatchField = TestDocument.booleanField, - terms = true, - false - ) - val queryIntTs = - termsSet(field = TestDocument.intField, minimumShouldMatchField = 
TestDocument.intField, terms = 1, 2, 3) - val queryWithBoost = - termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) - - val expectedString = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_field": "required_matches" - | } - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, false ], - | "minimum_should_match_field": "required_matches" - | } - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3], - | "minimum_should_match_field": "required_matches" - | } - | } - |} - |""".stripMargin - - val expectedStringTs = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_field": "stringField" - | } - | } - |} - |""".stripMargin - - val expectedBoolTs = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, false ], - | "minimum_should_match_field": "booleanField" - | } - | } - |} - |""".stripMargin - - val expectedIntTs = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3 ], - | "minimum_should_match_field": "intField" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3 ], - | "minimum_should_match_field": "required_matches", - | "boost": 10.0 - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && - assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && - assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) - }, - test("termsSetScript") { - val queryString = termsSetScript( - field = "stringField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBool = termsSetScript( - field = "booleanField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryInt = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3, - 4 - ) - val queryStringTs = termsSetScript( - field = TestDocument.stringField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSetScript( - field = TestDocument.booleanField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryIntTs = termsSetScript( - field = TestDocument.intField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3, - 4 - ) - val queryWithBoost = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3, - 4 - ).boost(10.0) - - val expectedString = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, 
false ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3, 4 ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedStringTs = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedBoolTs = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, false ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedIntTs = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3, 4 ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3, 4 ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | }, - | "boost": 10.0 - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && - assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && - assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("wildcard") { - val query = wildcard(TestDocument.stringField, "[a-zA-Z]+") - val queryWithBoost = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(3.14) - val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "[a-zA-Z]+").caseInsensitiveTrue - val queryWithAllParams = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(39.2).caseInsensitiveFalse - - val expected = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "[a-zA-Z]+" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "[a-zA-Z]+", - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "[a-zA-Z]+", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "[a-zA-Z]+", - | "boost": 39.2, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - } - ) - ) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package zio.elasticsearch + +import zio.Chunk +import zio.elasticsearch.ElasticHighlight.highlight +import zio.elasticsearch.ElasticQuery.{script => _, _} +import zio.elasticsearch.data.GeoPoint +import zio.elasticsearch.domain._ +import zio.elasticsearch.query.DistanceType.Plane +import zio.elasticsearch.query.DistanceUnit.Kilometers +import zio.elasticsearch.query.FunctionScoreFunction._ +import zio.elasticsearch.query.MultiMatchType._ +import zio.elasticsearch.query.MultiValueMode.Max +import zio.elasticsearch.query.ValidationMethod.IgnoreMalformed +import zio.elasticsearch.query._ +import zio.elasticsearch.script.{Painless, Script} +import zio.elasticsearch.utils._ +import zio.test.Assertion.equalTo +import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assert} + +import java.time.LocalDate + +object ElasticQuerySpec extends ZIOSpecDefault { + def spec: Spec[TestEnvironment, Any] = + suite("ElasticQuery")( + suite("constructing")( + suite("bool")( + test("filter") { + val query = filter(matches(TestDocument.stringField, "test"), matches(field = "testField", "test field")) + val queryWithBoost = + filter(matches(TestDocument.stringField, "test"), matches(TestDocument.intField, 22)) + .boost(10.21) + + assert(query)( + equalTo( + Bool[TestDocument]( + filter = Chunk( + Match(field = "stringField", value = "test"), + Match(field = "testField", value = "test field") + ), + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk( + Match(field = "stringField", value = "test"), + Match(field = "intField", value = 22) + ), + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk.empty, + boost = Some(10.21), + minimumShouldMatch = None + ) + ) + ) + }, + test("must") { + val query = must(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + must(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) + + assert(query)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk( + Match(field = "stringField", value = "test"), + Match(field = "testField", value = "test field") + ), + mustNot = Chunk.empty, + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22) + ), + mustNot = Chunk.empty, + should = Chunk.empty, + boost = Some(10.21), + minimumShouldMatch = None + ) + ) + ) + }, + test("mustNot") { + val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) + .boost(10.21) + + assert(query)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk( + Match(field = "stringField", value = "test"), + 
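(As an aside, a minimal sketch of how the bool combinators compose, using this spec's own TestDocument model and imports; `boolQuery` is an illustrative name.)

import zio.elasticsearch.ElasticQuery._
import zio.elasticsearch.domain._

// filter plus must yield a single Bool query; boost and minimumShouldMatch
// refine it afterwards, exactly as these equalTo assertions spell out.
val boolQuery = filter(matchPhrase(TestDocument.stringField, "test"))
  .must(matches(TestDocument.booleanField, true))
  .boost(3.14)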
Match(field = "testField", value = "test field") + ), + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22) + ), + should = Chunk.empty, + boost = Some(10.21), + minimumShouldMatch = None + ) + ) + ) + }, + test("should") { + val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) + val queryWithMinimumShouldMatch = should( + matches(TestDocument.stringField.keyword, "test"), + matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).minimumShouldMatch(2) + val queryWithAllParams = should( + matches(TestDocument.stringField.keyword, "test"), + matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).boost(3.14).minimumShouldMatch(2) + + assert(query)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField", value = "test"), + Match(field = "testField", value = "test field") + ), + boost = None, + minimumShouldMatch = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22) + ), + boost = Some(10.21), + minimumShouldMatch = None + ) + ) + ) && assert(queryWithMinimumShouldMatch)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22), + Exists(field = "booleanField", boost = None) + ), + boost = None, + minimumShouldMatch = Some(2) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22), + Exists(field = "booleanField", boost = None) + ), + boost = Some(3.14), + minimumShouldMatch = Some(2) + ) + ) + ) + }, + test("filter + must + mustNot + should") { + val query1 = + filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) + val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) + .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) + val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) + .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) + .mustNot(matches(TestDocument.intField, 50)) + val queryWithBoost = query1.boost(3.14) + val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) + val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) + + assert(query1)( + equalTo( + Bool[TestDocument]( + filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), + must = Chunk(Match(field = "booleanField", value = true)), + mustNot = Chunk.empty, + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(query2)( + equalTo( + Bool[TestDocument]( + filter = 
Chunk.empty, + must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), + mustNot = Chunk( + Match(field = "doubleField", value = 3.14), + Match(field = "testField", value = true), + Exists(field = "anotherTestField", boost = None) + ), + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(query3)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), + mustNot = Chunk(Match(field = "intField", value = 50)), + should = Chunk( + Range( + field = "intField", + lower = GreaterThan(1), + upper = LessThanOrEqualTo(100), + boost = None, + format = None + ), + Match(field = "stringField", value = "test") + ), + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), + must = Chunk(Match(field = "booleanField", value = true)), + mustNot = Chunk.empty, + should = Chunk.empty, + boost = Some(3.14), + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithMinimumShouldMatch)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), + mustNot = Chunk( + Match(field = "doubleField", value = 3.14), + Match(field = "testField", value = true), + Exists(field = "anotherTestField", boost = None) + ), + should = Chunk.empty, + boost = None, + minimumShouldMatch = Some(2) + ) + ) + ) && + assert(queryWithAllParams)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), + mustNot = Chunk(Match(field = "intField", value = 50)), + should = Chunk( + Range( + field = "intField", + lower = GreaterThan(1), + upper = LessThanOrEqualTo(100), + boost = None, + format = None + ), + Match(field = "stringField", value = "test") + ), + boost = Some(3.14), + minimumShouldMatch = Some(3) + ) + ) + ) + } + ), + test("boosting") { + val query = boosting(0.5f, exists("testField"), terms("booleanField", true, false)) + val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) + + assert(query)( + equalTo( + Boosting[Any]( + negativeBoost = 0.5f, + negativeQuery = exists("testField"), + positiveQuery = terms("booleanField", true, false) + ) + ) + ) && assert(queryTs)( + equalTo( + Boosting[TestDocument]( + negativeBoost = 0.5f, + negativeQuery = exists(TestDocument.stringField), + positiveQuery = terms(TestDocument.booleanField, true, false) + ) + ) + ) + }, + test("constantScore") { + val query = constantScore(terms("stringField", "a", "b", "c")) + val queryTs = constantScore(terms(TestDocument.stringField, "a", "b", "c")) + val queryWithBoost = constantScore(terms(TestDocument.stringField, "a", "b", "c")).boost(2.2) + + assert(query)( + equalTo( + ConstantScore[Any]( + Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), + boost = None + ) + ) + ) && + assert(queryTs)( + equalTo( + ConstantScore[TestDocument]( + Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), + boost = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + ConstantScore[TestDocument]( + Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), + boost = Some(2.2) + ) + ) + ) + }, + test("contains") { + val query = contains("testField", "test") + val 
queryTs = contains(TestDocument.stringField, "test") + val queryWithSuffix = contains(TestDocument.stringField.raw, "test") + val queryWithBoost = contains(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue + val queryAllParams = contains(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + assert(query)( + equalTo(Wildcard[Any](field = "testField", value = "*test*", boost = None, caseInsensitive = None)) + ) && + assert(queryTs)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "*test*", boost = None, caseInsensitive = None) + ) + ) && + assert(queryWithSuffix)( + equalTo( + Wildcard[TestDocument](field = "stringField.raw", value = "*test*", boost = None, caseInsensitive = None) + ) + ) && + assert(queryWithBoost)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + value = "*test*", + boost = Some(10.21), + caseInsensitive = None + ) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + value = "*test*", + boost = None, + caseInsensitive = Some(true) + ) + ) + ) && + assert(queryAllParams)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + value = "*test*", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + }, + test("disjunctionMax") { + val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) + val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) + val queryWithTieBreaker = disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) + + assert(query)( + equalTo( + DisjunctionMax[Any]( + queries = + Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), + tieBreaker = None + ) + ) + ) && + assert(queryTs)( + equalTo( + DisjunctionMax[TestDocument]( + queries = + Chunk(Exists[Any](field = "stringField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), + tieBreaker = None + ) + ) + ) && + assert(queryWithTieBreaker)( + equalTo( + DisjunctionMax[Any]( + queries = + Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), + tieBreaker = Some(0.5f) + ) + ) + ) + }, + test("exists") { + val query = exists("testField") + val queryTs = exists(TestDocument.intField) + val queryWithBoost = exists(TestDocument.intField).boost(3) + + assert(query)(equalTo(Exists[Any](field = "testField", boost = None))) && + assert(queryTs)(equalTo(Exists[TestDocument](field = "intField", boost = None))) && + assert(queryWithBoost)(equalTo(Exists[TestDocument](field = "intField", boost = Some(3)))) + + }, + test("functionScore") { + val scriptScore = scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")) + val weight = weightFunction(10.0) + val randomScore = randomScoreFunction() + val fieldValue = fieldValueFactor(TestDocument.stringField) + val decay = expDecayFunction("field", origin = "11, 12", scale = "2km") + val typedDecay = expDecayFunction(TestDocument.intField, origin = "11,12", scale = "2km") + + val fullQuery: FunctionScoreQuery[TestDocument] = functionScore(scriptScore, weight, randomScore) + .withFunctions(decay) + .withFunctions(fieldValue) + .boost(2.0) + .boostMode(FunctionScoreBoostMode.Avg) + .maxBoost(42) + .minScore(32) + .query(matches("stringField", "value")) + .scoreMode(FunctionScoreScoreMode.Min) + + val queryWithType: FunctionScoreQuery[TestDocument] = + 
functionScore(fieldValue).query(matches(TestDocument.stringField, "value")) + val queryTypeShrink: FunctionScoreQuery[TestDocument] = + functionScore(scriptScore).query(matches(TestDocument.stringField, "value")) + val queryWithoutTypeShrink: FunctionScoreQuery[Any] = + functionScore(scriptScore).query(matches("stringField", "value")) + val queryWithNewAnyQuery: FunctionScoreQuery[TestDocument] = + functionScore(fieldValue).query(matches("stringField", "value")) + + val anyQueryWithNewTypedFunction = functionScore(scriptScore).withFunctions(fieldValue) + val anyQueryWithNewAnyFunction = functionScore(scriptScore).withFunctions(weight) + val typedQueryWithNewTypedFunction = functionScore(fieldValue).withFunctions(typedDecay) + val typedQueryWithNewAnyFunction = functionScore(fieldValue).withFunctions(weight) + + assert(fullQuery)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk( + scriptScore, + weight, + randomScore, + decay, + fieldValue + ), + boost = Some(2.0), + boostMode = Some(FunctionScoreBoostMode.Avg), + maxBoost = Some(42.0), + minScore = Some(32.0), + query = Some(Match("stringField", "value")), + scoreMode = Some(FunctionScoreScoreMode.Min) + ) + ) + ) && + assert(queryTypeShrink)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk(scriptScore), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = Some(Match("stringField", "value")), + scoreMode = None + ) + ) + ) && + assert(queryWithType)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk(fieldValue), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = Some(Match("stringField", "value")), + scoreMode = None + ) + ) + ) && + assert(queryWithoutTypeShrink)( + equalTo( + FunctionScore[Any]( + functionScoreFunctions = Chunk(scriptScore), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = Some(Match("stringField", "value")), + scoreMode = None + ) + ) + ) && + assert(queryWithNewAnyQuery)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk(fieldValue), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = Some(Match("stringField", "value")), + scoreMode = None + ) + ) + ) && + assert(anyQueryWithNewTypedFunction)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk(scriptScore, fieldValue), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = None, + scoreMode = None + ) + ) + ) && + assert(anyQueryWithNewAnyFunction)( + equalTo( + FunctionScore[Any]( + functionScoreFunctions = Chunk(scriptScore, weight), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = None, + scoreMode = None + ) + ) + ) && + assert(typedQueryWithNewTypedFunction)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk(fieldValue, typedDecay), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = None, + scoreMode = None + ) + ) + ) && + assert(typedQueryWithNewAnyFunction)( + equalTo( + FunctionScore[TestDocument]( + functionScoreFunctions = Chunk(fieldValue, weight), + boost = None, + boostMode = None, + maxBoost = None, + minScore = None, + query = None, + scoreMode = None + ) + ) + ) + }, + test("fuzzy") { + val query = fuzzy("stringField", "test") + val queryTs = fuzzy(TestDocument.stringField, "test") + val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") + val 
queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) + val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) + val queryWithAllParameters = + fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) + val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") + + assert(query)( + equalTo( + Fuzzy[Any]( + field = "stringField", + value = "test", + fuzziness = None, + maxExpansions = None, + prefixLength = None + ) + ) + ) && + assert(queryTs)( + equalTo( + Fuzzy[TestDocument]( + field = "stringField", + value = "test", + fuzziness = None, + maxExpansions = None, + prefixLength = None + ) + ) + ) && + assert(queryWithFuzzinessAuto)( + equalTo( + Fuzzy[TestDocument]( + field = "stringField", + value = "test", + fuzziness = Some("AUTO"), + maxExpansions = None, + prefixLength = None + ) + ) + ) && + assert(queryWithMaxExpansions)( + equalTo( + Fuzzy[TestDocument]( + field = "stringField", + value = "test", + fuzziness = None, + maxExpansions = Some(50), + prefixLength = None + ) + ) + ) && + assert(queryWithPrefixLength)( + equalTo( + Fuzzy[TestDocument]( + field = "stringField", + value = "test", + fuzziness = None, + maxExpansions = None, + prefixLength = Some(3) + ) + ) + ) && + assert(queryWithSuffix)( + equalTo( + Fuzzy[TestDocument]( + field = "stringField.raw", + value = "test", + fuzziness = None, + maxExpansions = None, + prefixLength = None + ) + ) + ) && + assert(queryWithAllParameters)( + equalTo( + Fuzzy[TestDocument]( + field = "stringField", + value = "test", + fuzziness = Some("AUTO"), + maxExpansions = Some(50), + prefixLength = Some(3) + ) + ) + ) + }, + test("geoDistance") { + val queryWithHash = + geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) + val queryWithPoint = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) + val queryWithDistanceType = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) + val queryWithName = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") + val queryWithValidationMethod = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) + .validationMethod(IgnoreMalformed) + .distanceType(Plane) + .name("name") + + assert(queryWithHash)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "drm3btev3e86", + distance = Distance(200, Kilometers), + distanceType = None, + queryName = None, + validationMethod = None + ) + ) + ) && + assert(queryWithPoint)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = None, + queryName = None, + validationMethod = None + ) + ) + ) && assert(queryWithDistanceType)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = Some(Plane), + queryName = None, + validationMethod = None + ) + ) + ) && assert(queryWithName)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = None, + queryName = Some("name"), + validationMethod = None + ) + ) + ) 
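(A brief sketch of the geo-distance variants asserted here, assuming the GeoPoint, DistanceType and DistanceUnit imports this spec already uses; `byHash` and `byPoint` are illustrative names.)

import zio.elasticsearch.ElasticQuery._
import zio.elasticsearch.data.GeoPoint
import zio.elasticsearch.domain._
import zio.elasticsearch.query._
import zio.elasticsearch.query.DistanceType.Plane
import zio.elasticsearch.query.DistanceUnit.Kilometers

// Either a geo-hash string or a lat/lon GeoPoint can seed the query;
// distance type, query name and validation method chain on optionally.
val byHash  = geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers))
val byPoint = geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers))
  .distanceType(Plane)
  .name("name")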
&& assert(queryWithValidationMethod)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = None, + queryName = None, + validationMethod = Some(IgnoreMalformed) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = Some(Plane), + queryName = Some("name"), + validationMethod = Some(IgnoreMalformed) + ) + ) + ) + }, + test("geoPolygon") { + val query = + geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) + val queryTs = + geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) + val queryWithName = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") + val queryWithValidationMethod = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) + .validationMethod(IgnoreMalformed) + .name("name") + + assert(query)( + equalTo( + GeoPolygon[Any]( + field = "testField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = None, + validationMethod = None + ) + ) + ) && assert(queryTs)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("drm3btev3e86", "drm3btev3e87"), + queryName = None, + validationMethod = None + ) + ) + ) && assert(queryWithName)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = Some("name"), + validationMethod = None + ) + ) + ) && assert(queryWithValidationMethod)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = None, + validationMethod = Some(IgnoreMalformed) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = Some("name"), + validationMethod = Some(IgnoreMalformed) + ) + ) + ) + }, + test("hasChild") { + val query = hasChild("child", matchAll) + val queryWithIgnoreUnmapped = hasChild("child", matchAll).ignoreUnmappedTrue + val queryWithInnerHits = hasChild("child", matchAll).innerHits + val queryWithMaxChildren = hasChild("child", matchAll).maxChildren(5) + val queryWithMinChildren = hasChild("child", matchAll).minChildren(1) + val queryWithScoreMode = hasChild("child", matchAll).scoreMode(ScoreMode.Avg) + val queryWithAllParams = hasChild("child", matchAll) + .scoreMode(ScoreMode.Avg) + .ignoreUnmappedTrue + .innerHits + .maxChildren(5) + .minChildren(1) + + assert(query)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = None, + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithIgnoreUnmapped)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = Some(true), + innerHitsField = None, + maxChildren = None, + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithInnerHits)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = Some(InnerHits()), + maxChildren = None, + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithMaxChildren)( + equalTo( + HasChild[Any]( + 
childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = Some(5), + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithMinChildren)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = None, + minChildren = Some(1), + scoreMode = None + ) + ) + ) && assert(queryWithScoreMode)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = None, + minChildren = None, + scoreMode = Some(ScoreMode.Avg) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = Some(true), + innerHitsField = Some(InnerHits()), + maxChildren = Some(5), + minChildren = Some(1), + scoreMode = Some(ScoreMode.Avg) + ) + ) + ) + }, + test("hasParent") { + val query = hasParent("parent", matchAll) + val queryWithBoost = hasParent("parent", matchAll).boost(3) + val queryWithScoreTrue = hasParent("parent", matchAll).withScoreTrue + val queryWithScoreFalse = hasParent("parent", matchAll).withScoreFalse + val queryWithIgnoreUnmappedTrue = hasParent("parent", matchAll).ignoreUnmappedTrue + val queryWithIgnoreUnmappedFalse = hasParent("parent", matchAll).ignoreUnmappedFalse + val queryWithAllParams = hasParent("parent", matchAll).boost(3).ignoreUnmappedFalse.withScoreTrue + + assert(query)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = None, + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = Some(3.0), + ignoreUnmapped = None, + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithScoreTrue)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = None, + innerHitsField = None, + score = Some(true) + ) + ) + ) && assert(queryWithScoreFalse)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = None, + innerHitsField = None, + score = Some(false) + ) + ) + ) && assert(queryWithIgnoreUnmappedTrue)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = Some(true), + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithIgnoreUnmappedFalse)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = Some(false), + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = Some(3.0), + ignoreUnmapped = Some(false), + innerHitsField = None, + score = Some(true) + ) + ) + ) + }, + test("ids") { + val idsQuery = ids("1", "2", "3") + + assert(idsQuery)( + equalTo( + Ids[Any]( + values = Chunk("1", "2", "3") + ) + ) + ) + }, + test("kNN") { + val queryString = kNN("stringField", 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryBool = kNN("boolField", 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryInt = kNN("intField", 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryStringTs = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryBoolTs = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryIntTs = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryWithSimilarity = kNN(TestDocument.stringField, 
5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) + + assert(queryString)( + equalTo( + KNN[Any]( + field = "stringField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = None + ) + ) + ) && + assert(queryBool)( + equalTo( + KNN[Any]( + field = "boolField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = None + ) + ) + ) && + assert(queryInt)( + equalTo( + KNN[Any]( + field = "intField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = None + ) + ) + ) && + assert(queryStringTs)( + equalTo( + KNN[TestDocument]( + field = "stringField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = None + ) + ) + ) && + assert(queryBoolTs)( + equalTo( + KNN[TestDocument]( + field = "booleanField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = None + ) + ) + ) && + assert(queryIntTs)( + equalTo( + KNN[TestDocument]( + field = "intField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = None + ) + ) + ) && + assert(queryWithSimilarity)( + equalTo( + KNN[TestDocument]( + field = "stringField", + k = 5, + numCandidates = 10, + queryVector = Chunk(1.1, 2.2, 3.3), + similarity = Some(3.14) + ) + ) + ) + }, + test("matchAll") { + val query = matchAll + val queryWithBoost = matchAll.boost(3.14) + + assert(query)(equalTo(MatchAll(boost = None))) && assert(queryWithBoost)( + equalTo(MatchAll(boost = Some(3.14))) + ) + }, + test("matchBooleanPrefix") { + val queryString = matchBooleanPrefix("stringField", "test") + val queryBool = matchBooleanPrefix("booleanField", true) + val queryInt = matchBooleanPrefix("intField", 1) + val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") + val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) + val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) + val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") + val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) + + assert(queryString)( + equalTo(MatchBooleanPrefix[Any, String](field = "stringField", value = "test", minimumShouldMatch = None)) + ) && + assert(queryBool)( + equalTo(MatchBooleanPrefix[Any, Boolean](field = "booleanField", value = true, minimumShouldMatch = None)) + ) && + assert(queryInt)( + equalTo(MatchBooleanPrefix[Any, Int](field = "intField", value = 1, minimumShouldMatch = None)) + ) && + assert(queryStringTs)( + equalTo( + MatchBooleanPrefix[TestDocument, String](field = "stringField", value = "test", minimumShouldMatch = None) + ) + ) && + assert(queryBoolTs)( + equalTo( + MatchBooleanPrefix[TestDocument, Boolean](field = "booleanField", value = true, minimumShouldMatch = None) + ) + ) && + assert(queryIntTs)( + equalTo(MatchBooleanPrefix[TestDocument, Int](field = "intField", value = 1, minimumShouldMatch = None)) + ) && + assert(queryWithSuffix)( + equalTo( + MatchBooleanPrefix[TestDocument, String]( + field = "stringField.raw", + value = "test", + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithMinimumShouldMatch)( + equalTo( + MatchBooleanPrefix[TestDocument, String]( + field = "stringField", + value = "test", + minimumShouldMatch = Some(3) + ) + ) + ) + }, + test("matches") { + val queryString = matches("stringField", "test") + val queryBool = matches("booleanField", true) + val queryInt = matches("intField", 1) + val queryStringTs = 
matches(TestDocument.stringField, "test") + val queryBoolTs = matches(TestDocument.booleanField, true) + val queryIntTs = matches(TestDocument.intField, 1) + val queryWithSuffix = matches(TestDocument.stringField.raw, "test") + val queryDouble = matches(TestDocument.doubleField, 3.14) + + assert(queryString)(equalTo(Match[Any, String](field = "stringField", value = "test"))) && + assert(queryBool)(equalTo(Match[Any, Boolean](field = "booleanField", value = true))) && + assert(queryInt)(equalTo(Match[Any, Int](field = "intField", value = 1))) && + assert(queryStringTs)( + equalTo(Match[TestDocument, String](field = "stringField", value = "test")) + ) && + assert(queryBoolTs)( + equalTo(Match[TestDocument, Boolean](field = "booleanField", value = true)) + ) && + assert(queryIntTs)(equalTo(Match[TestDocument, Int](field = "intField", value = 1))) && + assert(queryWithSuffix)( + equalTo(Match[TestDocument, String](field = "stringField.raw", value = "test")) + ) && + assert(queryDouble)( + equalTo(Match[TestDocument, Double](field = "doubleField", value = 3.14)) + ) + }, + test("matchPhrase") { + val query = matchPhrase("stringField", "this is a test") + val queryTs = matchPhrase(TestDocument.stringField, "this is a test") + val queryWithSuffix = matchPhrase(TestDocument.stringField.raw, "this is a test") + val queryWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) + + assert(query)(equalTo(MatchPhrase[Any](field = "stringField", value = "this is a test", boost = None))) && + assert(queryTs)( + equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = None)) + ) && + assert(queryWithSuffix)( + equalTo(MatchPhrase[TestDocument](field = "stringField.raw", value = "this is a test", boost = None)) + ) && + assert(queryWithBoost)( + equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = Some(3))) + ) + }, + test("matchPhrasePrefix") { + val query = matchPhrasePrefix("stringField", "test") + val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") + + assert(query)(equalTo(MatchPhrasePrefix[Any](field = "stringField", value = "test"))) && + assert(queryTs)(equalTo(MatchPhrasePrefix[TestDocument](field = "stringField", value = "test"))) + }, + test("multiMatch") { + val query = multiMatch("this is a test") + val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") + val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) + val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) + val queryWithType = multiMatch("this is a test").matchingType(BestFields) + val queryWithBoost = multiMatch("this is a test").boost(2.2) + val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) + val queryWithAllParams = multiMatch("this is a test") + .fields(TestDocument.stringField) + .matchingType(BestFields) + .boost(2.2) + .minimumShouldMatch(2) + + assert(query)( + equalTo( + MultiMatch[Any]( + fields = Chunk.empty, + value = "this is a test", + matchingType = None, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithFields)( + equalTo( + MultiMatch[Any]( + fields = Chunk("stringField1", "stringField2"), + value = "this is a test", + matchingType = None, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithFieldsTs)( + equalTo( + MultiMatch[TestDocument]( + fields = Chunk("stringField"), + value = "this is a test", + matchingType = 
None, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithFieldsSuffix)( + equalTo( + MultiMatch[TestDocument]( + fields = Chunk("stringField.raw"), + value = "this is a test", + matchingType = None, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithType)( + equalTo( + MultiMatch[Any]( + fields = Chunk.empty, + value = "this is a test", + matchingType = Some(BestFields), + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + MultiMatch[Any]( + fields = Chunk.empty, + value = "this is a test", + matchingType = None, + boost = Some(2.2), + minimumShouldMatch = None + ) + ) + ) && + assert(queryWithMinimumShouldMatch)( + equalTo( + MultiMatch[Any]( + fields = Chunk.empty, + value = "this is a test", + matchingType = None, + boost = None, + minimumShouldMatch = Some(2) + ) + ) + ) && + assert(queryWithAllParams)( + equalTo( + MultiMatch[TestDocument]( + fields = Chunk("stringField"), + value = "this is a test", + matchingType = Some(BestFields), + boost = Some(2.2), + minimumShouldMatch = Some(2) + ) + ) + ) + }, + test("nested") { + val query = nested("testField", matchAll) + val queryTs = nested(TestDocument.subDocumentList, matchAll) + val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue + val queryWithInnerHits = + nested(TestDocument.subDocumentList, matchAll).innerHits(InnerHits().from(0).name("innerHitName").size(3)) + val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits + val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) + val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse + .innerHits(InnerHits().name("innerHitName")) + .scoreMode(ScoreMode.Max) + + assert(query)( + equalTo( + Nested[Any]( + path = "testField", + query = MatchAll(boost = None), + scoreMode = None, + ignoreUnmapped = None, + innerHitsField = None + ) + ) + ) && + assert(queryTs)( + equalTo( + Nested[TestDocument]( + path = "subDocumentList", + query = MatchAll(boost = None), + scoreMode = None, + ignoreUnmapped = None, + innerHitsField = None + ) + ) + ) && + assert(queryWithIgnoreUnmapped)( + equalTo( + Nested[TestDocument]( + path = "subDocumentList", + query = MatchAll(boost = None), + scoreMode = None, + ignoreUnmapped = Some(true), + innerHitsField = None + ) + ) + ) && + assert(queryWithInnerHits)( + equalTo( + Nested[TestDocument]( + path = "subDocumentList", + query = MatchAll(boost = None), + scoreMode = None, + ignoreUnmapped = None, + innerHitsField = Some( + InnerHits( + excluded = Chunk(), + included = Chunk(), + from = Some(0), + highlights = None, + name = Some("innerHitName"), + size = Some(3) + ) + ) + ) + ) + ) && + assert(queryWithInnerHitsEmpty)( + equalTo( + Nested[TestDocument]( + path = "subDocumentList", + query = MatchAll(boost = None), + scoreMode = None, + ignoreUnmapped = None, + innerHitsField = Some( + InnerHits( + excluded = Chunk(), + included = Chunk(), + from = None, + highlights = None, + name = None, + size = None + ) + ) + ) + ) + ) && + assert(queryWithScoreMode)( + equalTo( + Nested[TestDocument]( + path = "subDocumentList", + query = MatchAll(boost = None), + scoreMode = Some(ScoreMode.Avg), + ignoreUnmapped = None, + innerHitsField = None + ) + ) + ) && + assert(queryWithAllParams)( + equalTo( + Nested[TestDocument]( + path = "subDocumentList", + query = MatchAll(boost = None), + scoreMode = Some(ScoreMode.Max), + 
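(For orientation, a minimal multiMatch sketch matching the cases asserted above; BestFields comes from the MultiMatchType import this spec uses, and `mm` is an illustrative name.)

import zio.elasticsearch.ElasticQuery._
import zio.elasticsearch.query.MultiMatchType._

// An untyped multi_match over two raw field names; matchingType, boost and
// minimumShouldMatch are optional refinements, as the assertions verify.
val mm = multiMatch("this is a test")
  .fields("stringField1", "stringField2")
  .matchingType(BestFields)
  .boost(2.2)
  .minimumShouldMatch(2)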
ignoreUnmapped = Some(false), + innerHitsField = Some( + InnerHits( + excluded = Chunk(), + included = Chunk(), + from = None, + highlights = None, + name = Some("innerHitName"), + size = None + ) + ) + ) + ) + ) + }, + test("prefix") { + val query = prefix("stringField", "test") + val queryTs = prefix(TestDocument.stringField, "test") + val queryWithSuffix = prefix(TestDocument.stringField.keyword, "test") + val queryWithCaseInsensitive = prefix(TestDocument.stringField, "test").caseInsensitiveTrue + + assert(query)( + equalTo(Prefix[Any](field = "stringField", value = "test", caseInsensitive = None)) + ) && + assert(queryTs)( + equalTo(Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = None)) + ) && + assert(queryWithSuffix)( + equalTo( + Prefix[TestDocument](field = "stringField.keyword", value = "test", caseInsensitive = None) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = Some(true)) + ) + ) + }, + test("range") { + val query = range("testField") + val queryString = range(TestDocument.stringField) + val queryInt = range(TestDocument.intField) + val queryWithSuffix = range(TestDocument.stringField.suffix("test")) + val queryLowerBound = range(TestDocument.doubleField).gt(3.14) + val queryUpperBound = range(TestDocument.doubleField).lt(10.21) + val queryInclusiveLowerBound = range(TestDocument.intField).gte(10) + val queryInclusiveUpperBound = range(TestDocument.intField).lte(21) + val queryMixedBounds = queryLowerBound.lte(21.0) + val queryWithBoostParam = queryMixedBounds.boost(2.8) + val queryWithFormatParam = range(TestDocument.dateField).gt(LocalDate.of(2023, 5, 11)).format("yyyy-MM-dd") + + assert(query)( + equalTo( + Range[Any, Any, Unbounded.type, Unbounded.type]( + field = "testField", + lower = Unbounded, + upper = Unbounded, + boost = None, + format = None + ) + ) + ) && + assert(queryString)( + equalTo( + Range[TestDocument, String, Unbounded.type, Unbounded.type]( + field = "stringField", + lower = Unbounded, + upper = Unbounded, + boost = None, + format = None + ) + ) + ) && + assert(queryInt)( + equalTo( + Range[TestDocument, Int, Unbounded.type, Unbounded.type]( + field = "intField", + lower = Unbounded, + upper = Unbounded, + boost = None, + format = None + ) + ) + ) && + assert(queryWithSuffix)( + equalTo( + Range[TestDocument, String, Unbounded.type, Unbounded.type]( + field = "stringField.test", + lower = Unbounded, + upper = Unbounded, + boost = None, + format = None + ) + ) + ) && + assert(queryLowerBound)( + equalTo( + Range[TestDocument, Double, GreaterThan[Double], Unbounded.type]( + field = "doubleField", + lower = GreaterThan(3.14), + upper = Unbounded, + boost = None, + format = None + ) + ) + ) && + assert(queryUpperBound)( + equalTo( + Range[TestDocument, Double, Unbounded.type, LessThan[Double]]( + field = "doubleField", + lower = Unbounded, + upper = LessThan(10.21), + boost = None, + format = None + ) + ) + ) && + assert(queryInclusiveLowerBound)( + equalTo( + Range[TestDocument, Int, GreaterThanOrEqualTo[Int], Unbounded.type]( + field = "intField", + lower = GreaterThanOrEqualTo(10), + upper = Unbounded, + boost = None, + format = None + ) + ) + ) && + assert(queryInclusiveUpperBound)( + equalTo( + Range[TestDocument, Int, Unbounded.type, LessThanOrEqualTo[Int]]( + field = "intField", + lower = Unbounded, + upper = LessThanOrEqualTo(21), + boost = None, + format = None + ) + ) + ) && + assert(queryMixedBounds)( + equalTo( + 
Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]]( + field = "doubleField", + lower = GreaterThan(3.14), + upper = LessThanOrEqualTo(21.0), + boost = None, + format = None + ) + ) + ) && + assert(queryWithBoostParam)( + equalTo( + Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]]( + field = "doubleField", + lower = GreaterThan(3.14), + upper = LessThanOrEqualTo(21), + boost = Some(2.8), + format = None + ) + ) + ) && + assert(queryWithFormatParam)( + equalTo( + Range[TestDocument, LocalDate, GreaterThan[LocalDate], Unbounded.type]( + field = "dateField", + lower = GreaterThan(LocalDate.of(2023, 5, 11)), + upper = Unbounded, + boost = None, + format = Some("yyyy-MM-dd") + ) + ) + ) + }, + test("regexp") { + val query = regexp("stringField", "t.*st") + val queryTs = regexp(TestDocument.stringField, "t.*st") + val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue + val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") + + assert(query)(equalTo(Regexp[Any](field = "stringField", value = "t.*st", caseInsensitive = None))) && + assert(queryTs)( + equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = None)) + ) && + assert(queryWithCaseInsensitive)( + equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = Some(true))) + ) && + assert(queryWithSuffix)( + equalTo(Regexp[TestDocument](field = "stringField.raw", value = "t.*st", caseInsensitive = None)) + ) + }, + test("script") { + val query = + ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) + val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) + + assert(query)( + equalTo( + zio.elasticsearch.query.Script( + script = Script( + source = "doc['day_of_week'].value > params['day']", + params = Map("day" -> 2), + lang = Some(Painless) + ), + boost = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + zio.elasticsearch.query.Script( + script = Script( + source = "doc['day_of_week'].value > 2", + params = Map.empty, + lang = None + ), + boost = Some(2.0) + ) + ) + ) + }, + test("startsWith") { + val query = startsWith("testField", "test") + val queryTs = startsWith(TestDocument.stringField, "test") + val queryWithSuffix = startsWith(TestDocument.stringField.raw, "test") + val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue + val queryAllParams = startsWith(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + assert(query)( + equalTo(Wildcard[Any](field = "testField", value = "test*", boost = None, caseInsensitive = None)) + ) && + assert(queryTs)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = None) + ) + ) && + assert(queryWithSuffix)( + equalTo( + Wildcard[TestDocument](field = "stringField.raw", value = "test*", boost = None, caseInsensitive = None) + ) + ) && + assert(queryWithBoost)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + value = "test*", + boost = Some(10.21), + caseInsensitive = None + ) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = Some(true)) + ) + ) && + assert(queryAllParams)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + 
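(A short sketch of how each bound refines the Range query's type parameters, using this spec's TestDocument model; `bounded` is an illustrative name.)

import zio.elasticsearch.ElasticQuery._
import zio.elasticsearch.domain._

// gt then lte narrows the bounds step by step; the result is typed as
// Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]].
val bounded = range(TestDocument.doubleField).gt(3.14).lte(21.0).boost(2.8)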
value = "test*", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + }, + test("term") { + val queryString = term("stringField", "test") + val queryBool = term("booleanField", true) + val queryInt = term("intField", 1) + val queryStringTs = term(TestDocument.stringField, "test") + val queryBoolTs = term(TestDocument.booleanField, true) + val queryIntTs = term(TestDocument.intField, 1) + val queryWithSuffix = term(TestDocument.stringField.keyword, "test") + val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue + val queryAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + assert(queryString)( + equalTo(Term[Any, String](field = "stringField", value = "test", boost = None, caseInsensitive = None)) + ) && + assert(queryBool)( + equalTo(Term[Any, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None)) + ) && + assert(queryInt)( + equalTo(Term[Any, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) + ) && + assert(queryStringTs)( + equalTo( + Term[TestDocument, String](field = "stringField", value = "test", boost = None, caseInsensitive = None) + ) + ) && + assert(queryBoolTs)( + equalTo( + Term[TestDocument, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None) + ) + ) && + assert(queryIntTs)( + equalTo(Term[TestDocument, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) + ) && + assert(queryWithSuffix)( + equalTo( + Term[TestDocument, String]( + field = "stringField.keyword", + value = "test", + boost = None, + caseInsensitive = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + Term[TestDocument, String]( + field = "stringField", + value = "test", + boost = Some(10.21), + caseInsensitive = None + ) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Term[TestDocument, String]( + field = "stringField", + value = "test", + boost = None, + caseInsensitive = Some(true) + ) + ) + ) && + assert(queryAllParams)( + equalTo( + Term[TestDocument, String]( + field = "stringField", + value = "test", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + }, + test("terms") { + val queryString = terms("stringField", "a", "b", "c") + val queryBool = terms("booleanField", true, false) + val queryInt = terms("intField", 1, 2, 3) + val queryStringTs = terms(TestDocument.stringField, "a", "b", "c") + val queryBoolTs = terms(TestDocument.booleanField, true, false) + val queryIntTs = terms(TestDocument.intField, 1, 2, 3) + val queryWithSuffix = terms(TestDocument.stringField.keyword, "a", "b", "c") + val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) + + assert(queryString)( + equalTo(Terms[Any, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) + ) && + assert(queryBool)( + equalTo(Terms[Any, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) + ) && + assert(queryInt)( + equalTo(Terms[Any, Int](field = "intField", values = Chunk(1, 2, 3), boost = None)) + ) && + assert(queryStringTs)( + equalTo(Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) + ) && + assert(queryBoolTs)( + equalTo(Terms[TestDocument, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) + ) && + assert(queryIntTs)( + equalTo(Terms[TestDocument, Int](field = "intField", values = Chunk(1, 2, 3), 
boost = None)) + ) && + assert(queryWithSuffix)( + equalTo( + Terms[TestDocument, String](field = "stringField.keyword", values = Chunk("a", "b", "c"), boost = None) + ) + ) && + assert(queryWithBoost)( + equalTo( + Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = Some(10.21)) + ) + ) + }, + test("termsSet") { + val queryString = + termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") + val queryBool = + termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) + val queryInt = termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3) + val queryStringTs = termsSet( + field = TestDocument.stringField, + minimumShouldMatchField = TestDocument.stringField, + terms = "a", + "b", + "c" + ) + val queryBoolTs = termsSet( + field = TestDocument.booleanField, + minimumShouldMatchField = TestDocument.booleanField, + terms = true, + false + ) + val queryIntTs = + termsSet(field = TestDocument.intField, minimumShouldMatchField = TestDocument.intField, terms = 1, 2, 3) + val queryWithSuffix = + termsSet( + field = TestDocument.stringField.keyword, + minimumShouldMatchField = TestDocument.stringField, + terms = "a", + "b", + "c" + ) + val queryWithBoost = + termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) + + assert(queryString)( + equalTo( + TermsSet[Any, String]( + field = "stringField", + terms = Chunk("a", "b", "c"), + minimumShouldMatchField = Some("required_matches"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryBool)( + equalTo( + TermsSet[Any, Boolean]( + field = "booleanField", + terms = Chunk(true, false), + minimumShouldMatchField = Some("required_matches"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryInt)( + equalTo( + TermsSet[Any, Int]( + field = "intField", + terms = Chunk(1, 2, 3), + minimumShouldMatchField = Some("required_matches"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryStringTs)( + equalTo( + TermsSet[TestDocument, String]( + field = "stringField", + terms = Chunk("a", "b", "c"), + minimumShouldMatchField = Some("stringField"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryBoolTs)( + equalTo( + TermsSet[TestDocument, Boolean]( + field = "booleanField", + terms = Chunk(true, false), + minimumShouldMatchField = Some("booleanField"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryIntTs)( + equalTo( + TermsSet[TestDocument, Int]( + field = "intField", + terms = Chunk(1, 2, 3), + minimumShouldMatchField = Some("intField"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryWithSuffix)( + equalTo( + TermsSet[TestDocument, String]( + field = "stringField.keyword", + terms = Chunk("a", "b", "c"), + minimumShouldMatchField = Some("stringField"), + minimumShouldMatchScript = None, + boost = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + TermsSet[Any, Int]( + field = "intField", + terms = Chunk(1, 2, 3), + minimumShouldMatchField = Some("required_matches"), + minimumShouldMatchScript = None, + boost = Some(10.0) + ) + ) + ) + }, + test("termsSetScript") { + val queryString = termsSetScript( + field = "stringField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryBool = termsSetScript( + field = "booleanField", + 
minimumShouldMatchScript = Script("doc['intField'].value"), + terms = true, + false + ) + val queryInt = termsSetScript( + field = "intField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3 + ) + val queryStringTs = termsSetScript( + field = TestDocument.stringField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryBoolTs = termsSetScript( + field = TestDocument.booleanField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = true, + false + ) + val queryIntTs = termsSetScript( + field = TestDocument.intField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3 + ) + val queryWithSuffix = + termsSetScript( + field = TestDocument.stringField.keyword, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryWithBoost = termsSetScript( + field = "intField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3 + ).boost(10.0) + + assert(queryString)( + equalTo( + TermsSet[Any, String]( + field = "stringField", + terms = Chunk("a", "b", "c"), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryBool)( + equalTo( + TermsSet[Any, Boolean]( + field = "booleanField", + terms = Chunk(true, false), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryInt)( + equalTo( + TermsSet[Any, Int]( + field = "intField", + terms = Chunk(1, 2, 3), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryStringTs)( + equalTo( + TermsSet[TestDocument, String]( + field = "stringField", + terms = Chunk("a", "b", "c"), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryBoolTs)( + equalTo( + TermsSet[TestDocument, Boolean]( + field = "booleanField", + terms = Chunk(true, false), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryIntTs)( + equalTo( + TermsSet[TestDocument, Int]( + field = "intField", + terms = Chunk(1, 2, 3), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryWithSuffix)( + equalTo( + TermsSet[TestDocument, String]( + field = "stringField.keyword", + terms = Chunk("a", "b", "c"), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + TermsSet[Any, Int]( + field = "intField", + terms = Chunk(1, 2, 3), + minimumShouldMatchField = None, + minimumShouldMatchScript = Some(Script("doc['intField'].value")), + boost = Some(10.0) + ) + ) + ) + }, + test("wildcard") { + val query = wildcard("testField", "test") + val queryTs = wildcard(TestDocument.stringField, "test") + val queryWithSuffix = wildcard(TestDocument.stringField.raw, "test") + val queryWithBoost = wildcard(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "test").caseInsensitiveTrue + val queryAllParams = wildcard(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + assert(query)( + 
equalTo(Wildcard[Any](field = "testField", value = "test", boost = None, caseInsensitive = None)) + ) && + assert(queryTs)( + equalTo(Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = None)) + ) && + assert(queryWithSuffix)( + equalTo( + Wildcard[TestDocument](field = "stringField.raw", value = "test", boost = None, caseInsensitive = None) + ) + ) && + assert(queryWithBoost)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "test", boost = Some(10.21), caseInsensitive = None) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = Some(true)) + ) + ) && + assert(queryAllParams)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + value = "test", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + } + ), + suite("encoding as JSON")( + suite("bool")( + test("filter") { + val query = filter(matches(TestDocument.doubleField, 39.2)) + val queryWithBoost = filter(matches(TestDocument.booleanField, true)).boost(3.14) + + val expected = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match": { + | "doubleField": 39.2 + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match": { + | "booleanField": true + | } + | } + | ], + | "boost": 3.14 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("must") { + val query = must(matchPhrase(TestDocument.stringField, "test")) + val queryWithBoost = must(terms(TestDocument.stringField, "a", "b", "c")).boost(3.14) + + val expected = + """ + |{ + | "bool": { + | "must": [ + | { + | "match_phrase": { + | "stringField": "test" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "boost": 3.14 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("mustNot") { + val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) + .boost(10.21) + + val expected = + """ + |{ + | "bool": { + | "must_not": [ + | { + | "match": { + | "stringField": "test" + | } + | }, + | { + | "match": { + | "testField": "test field" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "must_not": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | } + | ], + | "boost": 10.21 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("should") { + val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) + val queryWithMinimumShouldMatch = should( + matches(TestDocument.stringField.keyword, "test"), + 
matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).minimumShouldMatch(2) + val queryWithAllParams = should( + matches(TestDocument.stringField.keyword, "test"), + matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).boost(3.14).minimumShouldMatch(2) + + val expected = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField": "test" + | } + | }, + | { + | "match": { + | "testField": "test field" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | } + | ], + | "boost": 10.21 + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | }, + | { + | "exists": { + | "field": "booleanField" + | } + | } + | ], + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | }, + | { + | "exists": { + | "field": "booleanField" + | } + | } + | ], + | "boost": 3.14, + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("filter + must + mustNot + should") { + val query1 = + filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) + val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) + .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) + val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) + .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) + .mustNot(matches(TestDocument.intField, 50)) + val queryWithBoost = query1.boost(3.14) + val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) + val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) + + val expected1 = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match_phrase": { + | "stringField": "test" + | } + | } + | ], + | "must": [ + | { + | "match": { + | "booleanField": true + | } + | } + | ] + | } + |} + |""".stripMargin + + val expected2 = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "doubleField": 3.14 + | } + | }, + | { + | "match": { + | "testField": true + | } + | }, + | { + | "exists": { + | "field": "anotherTestField" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expected3 = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "intField": 50 + | } + | } + | ], + | "should": [ + | { + | "range": { + | "intField": { + | "gt": 1, + | "lte": 100 + | } + | } + | }, + | { + | "match": { + | "stringField": "test" + | } + | } + | ] + | } + |} + 
|""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match_phrase": { + | "stringField": "test" + | } + | } + | ], + | "must": [ + | { + | "match": { + | "booleanField": true + | } + | } + | ], + | "boost": 3.14 + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "doubleField": 3.14 + | } + | }, + | { + | "match": { + | "testField": true + | } + | }, + | { + | "exists": { + | "field": "anotherTestField" + | } + | } + | ], + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "intField": 50 + | } + | } + | ], + | "should": [ + | { + | "range": { + | "intField": { + | "gt": 1, + | "lte": 100 + | } + | } + | }, + | { + | "match": { + | "stringField": "test" + | } + | } + | ], + | "boost": 3.14, + | "minimum_should_match": 3 + | } + |} + |""".stripMargin + + assert(query1.toJson(fieldPath = None))(equalTo(expected1.toJson)) && + assert(query2.toJson(fieldPath = None))(equalTo(expected2.toJson)) && + assert(query3.toJson(fieldPath = None))(equalTo(expected3.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + } + ), + test("boosting") { + val query = boosting(0.5f, exists("stringField"), terms("booleanField", true, false)) + val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) + + val expected = + """ + |{ + | "boosting": { + | "positive": { + | "terms": { + | "booleanField": [ true, false ] + | } + | }, + | "negative": { + | "exists": { + | "field": "stringField" + | } + | }, + | "negative_boost": 0.5 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("constantScore") { + val query = constantScore(matchPhrase("stringField", "test")) + val queryTs = constantScore(matchPhrase(TestDocument.stringField, "test")) + val queryWithBoost = constantScore(matchPhrase(TestDocument.stringField, "test")).boost(1.5) + + val expected = + """ + |{ + | "constant_score": { + | "filter": { + | "match_phrase": { + | "stringField": "test" + | } + | } + | } + |} + |""".stripMargin + val expectedWithBoost = + """ + |{ + | "constant_score": { + | "filter": { + | "match_phrase": { + | "stringField": "test" + | } + | }, + | "boost": 1.5 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("contains") { + val query = contains(TestDocument.stringField, "test") + val queryWithBoost = contains(TestDocument.stringField, "test").boost(3.14) + val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue + val queryWithAllParams = contains(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse + + val expected = + """ + 
|{ + | "wildcard": { + | "stringField": { + | "value": "*test*" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "*test*", + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "*test*", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "*test*", + | "boost": 39.2, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("disjunctionMax") { + val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) + val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) + val queryWithTieBreaker = + disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) + + val expected = + """ + |{ + | "dis_max": { + | "queries": [ + | { "exists": { "field": "existsField" } }, + | { "ids": { "values": ["1", "2", "3"] } } + | ] + | } + |} + |""".stripMargin + + val expectedTs = + """ + |{ + | "dis_max": { + | "queries": [ + | { "exists": { "field": "stringField" } }, + | { "ids": { "values": ["1", "2", "3"] } } + | ] + | } + |} + |""".stripMargin + + val expectedWithTieBreaker = + """ + |{ + | "dis_max": { + | "queries": [ + | { "exists": { "field": "existsField" } }, + | { "ids": { "values": ["1", "2", "3"] } } + | ], + | "tie_breaker": 0.5 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && + assert(queryWithTieBreaker.toJson(fieldPath = None))(equalTo(expectedWithTieBreaker.toJson)) + }, + test("exists") { + val query = exists("testField") + val queryTs = exists(TestDocument.dateField) + val queryTsWithBoost = exists(TestDocument.dateField).boost(3) + + val expected = + """ + |{ + | "exists": { + | "field": "testField" + | } + |} + |""".stripMargin + + val expectedTs = + """ + |{ + | "exists": { + | "field": "dateField" + | } + |} + |""".stripMargin + + val expectedTsWithBoost = + """ + |{ + | "exists": { + | "field": "dateField", + | "boost": 3.0 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && + assert(queryTsWithBoost.toJson(fieldPath = None))(equalTo(expectedTsWithBoost.toJson)) + }, + test("functionScore") { + val query = functionScore( + scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")), + randomScoreFunction().weight(2.0), + expDecayFunction("field", origin = "2013-09-17", scale = "10d") + .offset("5d") + .multiValueMode(Max) + .weight(10.0) + ) + .boost(2.0) + .boostMode(FunctionScoreBoostMode.Avg) + .maxBoost(42) + .minScore(32) + .query(matches("stringField", "string")) + .scoreMode(FunctionScoreScoreMode.Min) + + val expected = + """ + |{ + | "function_score": { + | "query" : { "match": { "stringField" : "string" } }, + | "score_mode": "min", + | "boost": 2.0, + | "boost_mode": "avg", + | "max_boost": 42.0, + | "min_score": 32.0, + 
| "functions": [ + | { + | "script_score": { + | "script": { + | "source": "params.agg1 + params.agg2 > 10" + | } + | } + | }, + | { + | "random_score": {}, + | "weight": 2.0 + | }, + | { + | "exp": { + | "field": { + | "origin": "2013-09-17", + | "scale": "10d", + | "offset": "5d" + | }, + | "multi_value_mode": "max" + | }, + | "weight": 10.0 + | } + | ] + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("fuzzy") { + val query = fuzzy("stringField", "test") + val queryTs = fuzzy(TestDocument.stringField, "test") + val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") + val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) + val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) + val queryWithAllParameters = + fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) + val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") + + val expected = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + val expectedWithFuzzinessAuto = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "fuzziness": "AUTO" + | } + | } + |} + |""".stripMargin + + val expectedWithMaxExpansions = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "max_expansions": 50 + | } + | } + |} + |""".stripMargin + + val expectedWithPrefixLength = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "prefix_length": 3 + | } + | } + |} + |""".stripMargin + + val expectedWithAllParameters = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "fuzziness": "AUTO", + | "max_expansions": 50, + | "prefix_length": 3 + | } + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "fuzzy": { + | "stringField.raw": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithFuzzinessAuto.toJson(fieldPath = None))(equalTo(expectedWithFuzzinessAuto.toJson)) && + assert(queryWithMaxExpansions.toJson(fieldPath = None))(equalTo(expectedWithMaxExpansions.toJson)) && + assert(queryWithPrefixLength.toJson(fieldPath = None))(equalTo(expectedWithPrefixLength.toJson)) && + assert(queryWithAllParameters.toJson(fieldPath = None))(equalTo(expectedWithAllParameters.toJson)) && + assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) + }, + test("geoDistance") { + val queryWithHash = + geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) + val queryWithPoint = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) + val queryWithDistanceType = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) + val queryWithName = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") + val queryWithValidationMethod = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) + .validationMethod(IgnoreMalformed) + .distanceType(Plane) + .name("name") + + val expectedWithHash 
= + """ + |{ + | "geo_distance": { + | "geoPointField": "drm3btev3e86", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithDistance = + """ + |{ + | "geo_distance": { + | "distance": "200.0km", + | "geoPointField": "20.0,21.1" + | } + |} + |""".stripMargin + + val expectedWithDistanceType = + """ + |{ + | "geo_distance": { + | "distance_type" : "plane", + | "geoPointField": "20.0,21.1", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithName = + """ + |{ + | "geo_distance": { + | "_name": "name", + | "geoPointField": "20.0,21.1", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithValidationMethod = + """ + |{ + | "geo_distance": { + | "validation_method": "IGNORE_MALFORMED", + | "geoPointField": "20.0,21.1", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "geo_distance": { + | "validation_method": "IGNORE_MALFORMED", + | "distance_type" : "plane", + | "_name": "name", + | "distance": "200.0km", + | "geoPointField": "20.0,21.1" + | } + |} + |""".stripMargin + + assert(queryWithHash.toJson(fieldPath = None))(equalTo(expectedWithHash.toJson)) && + assert(queryWithPoint.toJson(fieldPath = None))(equalTo(expectedWithDistance.toJson)) && + assert(queryWithDistanceType.toJson(fieldPath = None))(equalTo(expectedWithDistanceType.toJson)) && + assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && + assert(queryWithValidationMethod.toJson(fieldPath = None))(equalTo(expectedWithValidationMethod.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("geoPolygon") { + val query = + geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) + val queryTs = + geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) + val queryWithName = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") + val queryWithValidationMethod = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) + .validationMethod(IgnoreMalformed) + .name("name") + + val expected = + """ + |{ + | "geo_polygon": { + | "testField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + val expectedTs = + """ + |{ + | "geo_polygon": { + | "stringField": { + | "points": ["drm3btev3e86", "drm3btev3e87"] + | } + | } + |} + |""".stripMargin + + val expectedWithName = + """ + |{ + | "geo_polygon": { + | "_name": "name", + | "stringField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + val expectedWithValidationMethod = + """ + |{ + | "geo_polygon": { + | "validation_method": "IGNORE_MALFORMED", + | "stringField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "geo_polygon": { + | "validation_method": "IGNORE_MALFORMED", + | "_name": "name", + | "stringField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && + assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && + assert(queryWithValidationMethod.toJson(fieldPath = 
None))(equalTo(expectedWithValidationMethod.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("hasChild") { + val query = hasChild("child", matches(TestDocument.stringField, "test")) + val queryWithIgnoreUnmapped = hasChild("child", matches("field", "value")).ignoreUnmappedTrue + val queryWithInnerHits = hasChild("child", matches("field", "value")).innerHits + val queryWithMaxChildren = hasChild("child", matches("field", "value")).maxChildren(5) + val queryWithMinChildren = hasChild("child", matches("field", "value")).minChildren(1) + val queryWithScoreMode = hasChild("child", matches("field", "value")).scoreMode(ScoreMode.Avg) + val queryWithAllParams = hasChild("child", matches("field", "value")) + .scoreMode(ScoreMode.Avg) + .ignoreUnmappedTrue + .innerHits + .maxChildren(5) + .minChildren(1) + + val expected = + """ + |{ + | "has_child": { + | "type": "child", + | "query": { + | "match": { + | "stringField" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithIgnoreUnmapped = + """ + |{ + | "has_child": { + | "type": "child", + | "ignore_unmapped": true, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithInnerHits = + """ + |{ + | "has_child": { + | "type": "child", + | "inner_hits": {}, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithMaxChildren = + """ + |{ + | "has_child": { + | "type": "child", + | "max_children": 5, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithMinChildren = + """ + |{ + | "has_child": { + | "type": "child", + | "min_children": 1, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithScoreMode = + """ + |{ + | "has_child": { + | "type": "child", + | "score_mode": "avg", + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "has_child": { + | "type": "child", + | "score_mode": "avg", + | "ignore_unmapped": true, + | "inner_hits": {}, + | "max_children": 5, + | "min_children": 1, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && + assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && + assert(queryWithMaxChildren.toJson(fieldPath = None))(equalTo(expectedWithMaxChildren.toJson)) && + assert(queryWithMinChildren.toJson(fieldPath = None))(equalTo(expectedWithMinChildren.toJson)) && + assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("hasParent") { + val query = hasParent("parent", matches(TestDocument.stringField, "test")) + val queryWithBoost = hasParent("parent", matches(TestDocument.stringField, "test")).boost(3) + val queryWithScore = hasParent("parent", matches("field", "test")).withScoreFalse + val queryWithIgnoreUnmapped = hasParent("parent", matches("field", "test")).ignoreUnmappedFalse + val queryWithScoreAndIgnoreUnmapped = + hasParent("parent", matches("field", "test")).withScoreTrue.ignoreUnmappedTrue + val queryWithInnerHits = 
hasParent("parent", matches("field", "test")).innerHits + val queryWithAllParams = hasParent("parent", matches(TestDocument.stringField, "test")) + .boost(3) + .withScoreFalse + .ignoreUnmappedFalse + .innerHits + val expected = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "stringField" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "stringField" : "test" + | } + | }, + | "boost": 3.0 + | } + |} + |""".stripMargin + + val expectedWithScore = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "score": false, + | "query": { + | "match": { + | "field" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithIgnoreUnmapped = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "ignore_unmapped": false, + | "query": { + | "match": { + | "field" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithScoreAndIgnoreUnmapped = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "score": true, + | "ignore_unmapped": true, + | "query": { + | "match": { + | "field" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithInnerHits = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "field" : "test" + | } + | }, + | "inner_hits": {} + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "stringField" : "test" + | } + | }, + | "boost": 3.0, + | "ignore_unmapped": false, + | "score": false, + | "inner_hits": {} + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithScore.toJson(fieldPath = None))(equalTo(expectedWithScore.toJson)) && + assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && + assert(queryWithScoreAndIgnoreUnmapped.toJson(fieldPath = None))( + equalTo(expectedWithScoreAndIgnoreUnmapped.toJson) + ) && + assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("ids") { + val query = ids("1", "2", "3") + + val expected = + """ + |{ + | "ids": { + | "values": ["1", "2", "3"] + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("kNN") { + val queryString = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryBool = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryInt = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryWithSimilarity = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) + + val expectedString = + """ + |{ + | "field": "stringField", + | "query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10 + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "field": "booleanField", + | "query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10 + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "field": "intField", + | "query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10 + |} + |""".stripMargin + + val expectedWithSimilarity = + """ + |{ + | "field": "stringField", + | 
"query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10, + | "similarity": 3.14 + |} + |""".stripMargin + + assert(queryString.toJson)(equalTo(expectedString.toJson)) && + assert(queryBool.toJson)(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson)(equalTo(expectedInt.toJson)) && + assert(queryWithSimilarity.toJson)(equalTo(expectedWithSimilarity.toJson)) + }, + test("matchAll") { + val query = matchAll + val queryWithBoost = matchAll.boost(3.14) + + val expected = + """ + |{ + | "match_all": {} + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "match_all": { + | "boost": 3.14 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("matchBooleanPrefix") { + val queryString = matchBooleanPrefix("stringField", "test") + val queryBool = matchBooleanPrefix("booleanField", true) + val queryInt = matchBooleanPrefix("intField", 1) + val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") + val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) + val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) + val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") + val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) + + val expectedString = + """ + |{ + | "match_bool_prefix": { + | "stringField": "test" + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "match_bool_prefix": { + | "booleanField": true + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "match_bool_prefix": { + | "intField": 1 + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "match_bool_prefix": { + | "stringField.raw": "test" + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "match_bool_prefix": { + | "stringField": { + | "query": "test", + | "minimum_should_match": 3 + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))(equalTo(expectedWithMinimumShouldMatch.toJson)) + }, + test("matches") { + val query = matches("testField", true) + val queryTsInt = matches(TestDocument.intField, 39) + val queryTsString = matches(TestDocument.stringField, "test") + + val expected = + """ + |{ + | "match": { + | "testField": true + | } + |} + |""".stripMargin + + val expectedTsInt = + """ + |{ + | "match": { + | "intField": 39 + | } + |} + |""".stripMargin + + val expectedTsString = + """ + |{ + | "match": { + | "stringField": "test" + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTsInt.toJson(fieldPath = None))(equalTo(expectedTsInt.toJson)) && + 
assert(queryTsString.toJson(fieldPath = None))(equalTo(expectedTsString.toJson)) + }, + test("matchPhrase") { + val querySimple = matchPhrase("stringField", "this is a test") + val queryRaw = matchPhrase("stringField.raw", "this is a test") + val querySimpleTs = matchPhrase(TestDocument.stringField, "this is a test") + val queryRawTs = matchPhrase(TestDocument.stringField.raw, "this is a test") + val querySimpleTsWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) + + val expectedSimple = + """ + |{ + | "match_phrase": { + | "stringField": "this is a test" + | } + |} + |""".stripMargin + + val expectedRaw = + """ + |{ + | "match_phrase": { + | "stringField.raw": "this is a test" + | } + |} + |""".stripMargin + + val expectedSimpleTsWithBoost = + """ + |{ + | "match_phrase": { + | "stringField": { + | "query": "this is a test", + | "boost": 3.0 + | } + | } + |} + |""".stripMargin + + assert(querySimple.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && + assert(querySimpleTs.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && + assert(queryRaw.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && + assert(queryRawTs.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && + assert(querySimpleTsWithBoost.toJson(fieldPath = None))(equalTo(expectedSimpleTsWithBoost.toJson)) + }, + test("matchPhrasePrefix") { + val query = matchPhrasePrefix("stringField", "test") + val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") + + val expected = + """ + |{ + | "match_phrase_prefix": { + | "stringField": { + | "query" : "test" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("multiMatch") { + val query = multiMatch("this is a test") + val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") + val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) + val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) + val queryWithType = multiMatch("this is a test").matchingType(BestFields) + val queryWithBoost = multiMatch("this is a test").boost(2.2) + val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) + val queryWithAllParams = multiMatch("this is a test") + .fields(TestDocument.stringField) + .matchingType(BestFields) + .boost(2.2) + .minimumShouldMatch(2) + + val expected = + """ + |{ + | "multi_match": { + | "query": "this is a test" + | } + |} + |""".stripMargin + + val expectedWithFields = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "fields": [ "stringField1", "stringField2" ] + | } + |} + |""".stripMargin + + val expectedWithFieldsTs = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "fields": [ "stringField" ] + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "fields": [ "stringField.raw" ] + | } + |} + |""".stripMargin + + val expectedWithType = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "type": "best_fields" + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "boost": 2.2 + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + 
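// With every option set, "type", "fields", "boost" and "minimum_should_match" are all expected to render side by side in a single "multi_match" object: +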
+ val expectedWithAllParams = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "type": "best_fields", + | "fields": [ "stringField" ], + | "boost": 2.2, + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithFields.toJson(fieldPath = None))(equalTo(expectedWithFields.toJson)) && + assert(queryWithFieldsTs.toJson(fieldPath = None))(equalTo(expectedWithFieldsTs.toJson)) && + assert(queryWithFieldsSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && + assert(queryWithType.toJson(fieldPath = None))(equalTo(expectedWithType.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("nested") { + val query = nested(TestDocument.subDocumentList, matchAll) + val queryWithNested = nested(TestDocument.subDocumentList, nested("items", term("testField", "test"))) + val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue + val queryWithInnerHits = + nested(TestDocument.subDocumentList, matchAll).innerHits( + InnerHits() + .from(0) + .size(3) + .name("innerHitName") + .highlights(highlight("stringField")) + .excludes("longField") + .includes("intField") + ) + val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits + val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) + val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse + .innerHits(InnerHits().from(10).size(20).name("innerHitName")) + .scoreMode(ScoreMode.Min) + + val expected = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | } + | } + |} + |""".stripMargin + + val expectedWithNested = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "nested": { + | "path": "subDocumentList.items", + | "query": { + | "term": { + | "subDocumentList.items.testField": { + | "value": "test" + | } + | } + | } + | } + | } + | } + |} + |""".stripMargin + + val expectedWithIgnoreUnmapped = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "ignore_unmapped": true + | } + |} + |""".stripMargin + + val expectedWithInnerHits = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "inner_hits": { + | "from": 0, + | "size": 3, + | "name": "innerHitName", + | "highlight" : { + | "fields" : { + | "subDocumentList.stringField" : {} + | } + | }, + | "_source" : { + | "includes" : [ + | "intField" + | ], + | "excludes" : [ + | "longField" + | ] + | } + | } + | } + |} + |""".stripMargin + + val expectedWithInnerHitsEmpty = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "inner_hits": {} + | } + |} + |""".stripMargin + + val expectedWithScoreMode = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "score_mode": "avg" + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "ignore_unmapped": false, + | "score_mode": "min", + | "inner_hits": { + | "from": 
10, + | "size": 20, + | "name": "innerHitName" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithNested.toJson(fieldPath = None))(equalTo(expectedWithNested.toJson)) && + assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && + assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && + assert(queryWithInnerHitsEmpty.toJson(fieldPath = None))(equalTo(expectedWithInnerHitsEmpty.toJson)) && + assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("prefix") { + val query = prefix(TestDocument.stringField, "test") + val queryWithCaseInsensitive = prefix(TestDocument.stringField, "test").caseInsensitiveTrue + + val expected = + """ + |{ + | "prefix": { + | "stringField": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "prefix": { + | "stringField": { + | "value": "test", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) + }, + test("range") { + val queryEmpty = range(TestDocument.intField) + val queryEmptyWithBoost = range(TestDocument.intField).boost(3.14) + val queryLowerBound = range(TestDocument.intField).gt(23) + val queryUpperBound = range(TestDocument.intField).lt(45) + val queryInclusiveLowerBound = range(TestDocument.intField).gte(23) + val queryInclusiveUpperBound = range(TestDocument.intField).lte(45) + val queryMixedBounds = range(TestDocument.intField).gt(10).lte(99) + val queryMixedBoundsWithBoost = range(TestDocument.intField).gt(10).lte(99).boost(3.14) + val queryWithFormat = range(TestDocument.dateField).gt(LocalDate.of(2020, 1, 10)).format("yyyy-MM-dd") + + val expectedEmpty = + """ + |{ + | "range": { + | "intField": { + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "range": { + | "intField": { + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedLowerBound = + """ + |{ + | "range": { + | "intField": { + | "gt": 23 + | } + | } + |} + |""".stripMargin + + val expectedUpperBound = + """ + |{ + | "range": { + | "intField": { + | "lt": 45 + | } + | } + |} + |""".stripMargin + + val expectedInclusiveLowerBound = + """ + |{ + | "range": { + | "intField": { + | "gte": 23 + | } + | } + |} + |""".stripMargin + + val expectedInclusiveUpperBound = + """ + |{ + | "range": { + | "intField": { + | "lte": 45 + | } + | } + |} + |""".stripMargin + + val expectedMixedBounds = + """ + |{ + | "range": { + | "intField": { + | "gt": 10, + | "lte": 99 + | } + | } + |} + |""".stripMargin + + val expectedMixedBoundsWithBoost = + """ + |{ + | "range": { + | "intField": { + | "gt": 10, + | "lte": 99, + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithFormat = + """ + |{ + | "range": { + | "dateField": { + | "gt": "2020-01-10", + | "format": "yyyy-MM-dd" + | } + | } + |} + |""".stripMargin + + assert(queryEmpty.toJson(fieldPath = None))(equalTo(expectedEmpty.toJson)) && + assert(queryEmptyWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryLowerBound.toJson(fieldPath = None))(equalTo(expectedLowerBound.toJson)) && + 
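// Unset bounds are omitted from the payload: an empty range renders as "intField": {} and a single bound as just its "gt"/"lt"/"gte"/"lte" key, as asserted above and below. +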
assert(queryUpperBound.toJson(fieldPath = None))(equalTo(expectedUpperBound.toJson)) && + assert(queryInclusiveLowerBound.toJson(fieldPath = None))(equalTo(expectedInclusiveLowerBound.toJson)) && + assert(queryInclusiveUpperBound.toJson(fieldPath = None))(equalTo(expectedInclusiveUpperBound.toJson)) && + assert(queryMixedBounds.toJson(fieldPath = None))(equalTo(expectedMixedBounds.toJson)) && + assert(queryMixedBoundsWithBoost.toJson(fieldPath = None))(equalTo(expectedMixedBoundsWithBoost.toJson)) && + assert(queryWithFormat.toJson(fieldPath = None))(equalTo(expectedWithFormat.toJson)) + }, + test("regexp") { + val query = regexp("stringField", "t.*st") + val queryTs = regexp(TestDocument.stringField, "t.*st") + val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue + val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") + + val expected = + """ + |{ + | "regexp": { + | "stringField": { + | "value": "t.*st" + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "regexp": { + | "stringField": { + | "value": "t.*st", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "regexp": { + | "stringField.raw": { + | "value": "t.*st" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) + }, + test("script") { + val query = + ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) + val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) + + val expected = + """ + |{ + | "script": { + | "script": { + | "lang": "painless", + | "source": "doc['day_of_week'].value > params['day']", + | "params": { + | "day": 2 + | } + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "script": { + | "script": { + | "source": "doc['day_of_week'].value > 2" + | }, + | "boost": 2.0 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("startsWith") { + val query = startsWith(TestDocument.stringField, "test") + val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(3.14) + val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue + val queryWithAllParams = startsWith(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse + + val expected = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*", + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*", + | "boost": 39.2, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + 
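// `startsWith` is encoded as a "wildcard" query with `*` appended to the given value, so no dedicated "starts_with" query type appears in the JSON. +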
assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("term") { + val queryString = term(TestDocument.stringField, "test") + val queryBool = term(TestDocument.booleanField, true) + val queryInt = term(TestDocument.intField, 21) + val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue + val queryWithAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + val expectedString = + """ + |{ + | "term": { + | "stringField": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "term": { + | "booleanField": { + | "value": true + | } + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "term": { + | "intField": { + | "value": 21 + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "term": { + | "stringField": { + | "value": "test", + | "boost": 10.21 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "term": { + | "stringField": { + | "value": "test", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "term": { + | "stringField": { + | "value": "test", + | "boost": 3.14, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("terms") { + val queryString = terms(TestDocument.stringField, "a", "b", "c") + val queryBool = terms(TestDocument.booleanField, true, false) + val queryInt = terms(TestDocument.intField, 1, 2, 3, 4) + val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) + + val expectedString = + """ + |{ + | "terms": { + | "stringField": [ "a", "b", "c" ] + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "terms": { + | "booleanField": [ true, false ] + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "terms": { + | "intField": [ 1, 2, 3, 4 ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "terms": { + | "stringField": [ "a", "b", "c" ], + | "boost": 10.21 + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("termsSet") { + val queryString = + termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") + val queryBool = + termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) + val queryInt = termsSet(field = "intField", 
minimumShouldMatchField = "required_matches", terms = 1, 2, 3) + val queryStringTs = termsSet( + field = TestDocument.stringField, + minimumShouldMatchField = TestDocument.stringField, + terms = "a", + "b", + "c" + ) + val queryBoolTs = termsSet( + field = TestDocument.booleanField, + minimumShouldMatchField = TestDocument.booleanField, + terms = true, + false + ) + val queryIntTs = + termsSet(field = TestDocument.intField, minimumShouldMatchField = TestDocument.intField, terms = 1, 2, 3) + val queryWithBoost = + termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) + + val expectedString = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_field": "required_matches" + | } + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_field": "required_matches" + | } + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3], + | "minimum_should_match_field": "required_matches" + | } + | } + |} + |""".stripMargin + + val expectedStringTs = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_field": "stringField" + | } + | } + |} + |""".stripMargin + + val expectedBoolTs = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_field": "booleanField" + | } + | } + |} + |""".stripMargin + + val expectedIntTs = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3 ], + | "minimum_should_match_field": "intField" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3 ], + | "minimum_should_match_field": "required_matches", + | "boost": 10.0 + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && + assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && + assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) + }, + test("termsSetScript") { + val queryString = termsSetScript( + field = "stringField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryBool = termsSetScript( + field = "booleanField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = true, + false + ) + val queryInt = termsSetScript( + field = "intField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3, + 4 + ) + val queryStringTs = termsSetScript( + field = TestDocument.stringField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryBoolTs = termsSetScript( + field = TestDocument.booleanField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = true, + false + ) + val queryIntTs = termsSetScript( + field = TestDocument.intField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3, + 4 + ) + val queryWithBoost = 
termsSetScript( + field = "intField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3, + 4 + ).boost(10.0) + + val expectedString = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3, 4 ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedStringTs = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedBoolTs = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedIntTs = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3, 4 ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3, 4 ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | }, + | "boost": 10.0 + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && + assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && + assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("wildcard") { + val query = wildcard(TestDocument.stringField, "[a-zA-Z]+") + val queryWithBoost = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(3.14) + val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "[a-zA-Z]+").caseInsensitiveTrue + val queryWithAllParams = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(39.2).caseInsensitiveFalse + + val expected = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+", + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+", + | "boost": 39.2, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = 
None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + } + ) + ) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala index a863ea7b9..04d3607c5 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala @@ -1,87 +1,87 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import zio.elasticsearch.domain.{TestNestedField, TestSubDocument} -import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assertTrue} - -object FieldDSLSpec extends ZIOSpecDefault { - def spec: Spec[TestEnvironment, Any] = - suite("Field DSL")( - suite("constructing")( - test("field") { - val encodeName = "name" - - val encodeNestedNumber = "number" - val encodeNestedAddress = "address" - val encodeNestedExpectedVal = "address.number" - - val encodeSuffixName = "name" - val encodeSuffixKeyword = "keyword" - val encodeSuffixExpectedVal = "name.keyword" - - val encodeSingleFieldSuffixName = "name" - val encodeSingleFieldSuffixMultiField = "multi_field" - val encodeSingleFieldSuffixKeyword = "keyword" - val encodeSingleFieldSuffixExpectedVal = "name.multi_field.keyword" - - val encodeSingleFieldKeywordSuffixName = "name" - val encodeSingleFieldKeywordSuffixExpectedVal = "name.keyword" - - val encodeSingleFieldRawSuffixKeyword = "name" - val encodeSingleFieldRawSuffixExpectedVal = "name.raw" - - assertTrue(Field(None, encodeName).toString == encodeName) && assertTrue( - Field(Some(Field(None, encodeNestedAddress)), encodeNestedNumber).toString == encodeNestedExpectedVal - ) && assertTrue( - Field[Any, String](None, encodeSuffixName).suffix(encodeSuffixKeyword).toString == encodeSuffixExpectedVal - ) && assertTrue( - Field[Any, String](None, encodeSingleFieldSuffixName) - .suffix(encodeSingleFieldSuffixMultiField) - .suffix(encodeSingleFieldSuffixKeyword) - .toString == encodeSingleFieldSuffixExpectedVal - ) && assertTrue( - Field[Any, String]( - None, - encodeSingleFieldKeywordSuffixName - ).keyword.toString == encodeSingleFieldKeywordSuffixExpectedVal - ) && assertTrue( - Field[Any, String]( - None, - encodeSingleFieldRawSuffixKeyword - ).raw.toString == encodeSingleFieldRawSuffixExpectedVal - ) - }, - test("path") { - val singleFieldExpectedVal = "stringField" - - val nestedFieldAccessorsExpectedVal = "nestedField.longField" - - val singleFieldSuffixAccessorKeyword = "keyword" - val singleFieldSuffixAccessorExpectedVal = "stringField.keyword" - - assertTrue(TestSubDocument.stringField.toString == singleFieldExpectedVal) && assertTrue( - (TestSubDocument.nestedField / TestNestedField.longField).toString == nestedFieldAccessorsExpectedVal - ) && assertTrue( - TestSubDocument.stringField - .suffix(singleFieldSuffixAccessorKeyword) - .toString == 
singleFieldSuffixAccessorExpectedVal - ) - } - ) - ) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package zio.elasticsearch + +import zio.elasticsearch.domain.{TestNestedField, TestSubDocument} +import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assertTrue} + +object FieldDSLSpec extends ZIOSpecDefault { + def spec: Spec[TestEnvironment, Any] = + suite("Field DSL")( + suite("constructing")( + test("field") { + val encodeName = "name" + + val encodeNestedNumber = "number" + val encodeNestedAddress = "address" + val encodeNestedExpectedVal = "address.number" + + val encodeSuffixName = "name" + val encodeSuffixKeyword = "keyword" + val encodeSuffixExpectedVal = "name.keyword" + + val encodeSingleFieldSuffixName = "name" + val encodeSingleFieldSuffixMultiField = "multi_field" + val encodeSingleFieldSuffixKeyword = "keyword" + val encodeSingleFieldSuffixExpectedVal = "name.multi_field.keyword" + + val encodeSingleFieldKeywordSuffixName = "name" + val encodeSingleFieldKeywordSuffixExpectedVal = "name.keyword" + + val encodeSingleFieldRawSuffixKeyword = "name" + val encodeSingleFieldRawSuffixExpectedVal = "name.raw" + + assertTrue(Field(None, encodeName).toString == encodeName) && assertTrue( + Field(Some(Field(None, encodeNestedAddress)), encodeNestedNumber).toString == encodeNestedExpectedVal + ) && assertTrue( + Field[Any, String](None, encodeSuffixName).suffix(encodeSuffixKeyword).toString == encodeSuffixExpectedVal + ) && assertTrue( + Field[Any, String](None, encodeSingleFieldSuffixName) + .suffix(encodeSingleFieldSuffixMultiField) + .suffix(encodeSingleFieldSuffixKeyword) + .toString == encodeSingleFieldSuffixExpectedVal + ) && assertTrue( + Field[Any, String]( + None, + encodeSingleFieldKeywordSuffixName + ).keyword.toString == encodeSingleFieldKeywordSuffixExpectedVal + ) && assertTrue( + Field[Any, String]( + None, + encodeSingleFieldRawSuffixKeyword + ).raw.toString == encodeSingleFieldRawSuffixExpectedVal + ) + }, + test("path") { + val singleFieldExpectedVal = "stringField" + + val nestedFieldAccessorsExpectedVal = "nestedField.longField" + + val singleFieldSuffixAccessorKeyword = "keyword" + val singleFieldSuffixAccessorExpectedVal = "stringField.keyword" + + assertTrue(TestSubDocument.stringField.toString == singleFieldExpectedVal) && assertTrue( + (TestSubDocument.nestedField / TestNestedField.longField).toString == nestedFieldAccessorsExpectedVal + ) && assertTrue( + TestSubDocument.stringField + .suffix(singleFieldSuffixAccessorKeyword) + .toString == singleFieldSuffixAccessorExpectedVal + ) + } + ) + ) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala index fdd847540..5545b7868 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala @@ -1,298 +1,298 @@ -/* - * Copyright 2022 
LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import zio.Chunk -import zio.elasticsearch.ElasticAggregation.termsAggregation -import zio.elasticsearch.ElasticQuery.{kNN, matchAll, term} -import zio.elasticsearch.domain.TestDocument -import zio.elasticsearch.executor.Executor -import zio.elasticsearch.executor.response.{BulkResponse, CreateBulkResponse, Shards} -import zio.elasticsearch.request.CreationOutcome.Created -import zio.elasticsearch.request.DeletionOutcome.Deleted -import zio.elasticsearch.request.UpdateConflicts.Proceed -import zio.elasticsearch.request.UpdateOutcome -import zio.elasticsearch.result.{TermsAggregationBucketResult, TermsAggregationResult, UpdateByQueryResult} -import zio.elasticsearch.script.Script -import zio.test.Assertion._ -import zio.test.{Spec, TestEnvironment, TestResultZIOOps, assertZIO} - -object HttpElasticExecutorSpec extends SttpBackendStubSpec { - def spec: Spec[TestEnvironment, Any] = - suite("HttpElasticExecutor")( - test("aggregation") { - val executorAggregate = - Executor - .execute(ElasticRequest.aggregate(index, termsAggregation(name = "aggregation1", field = "name"))) - .aggregations - - val expectedTermsAggregationResult = - Map( - "aggregation1" -> TermsAggregationResult( - docErrorCount = 0, - sumOtherDocCount = 0, - buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) - ) - ) - - assertZIO(executorAggregate)(equalTo(expectedTermsAggregationResult)) - }, - test("bulk") { - val executorBulk = Executor.execute(ElasticRequest.bulk(ElasticRequest.create(index, doc)).refreshTrue) - - val expectedBulkResponse = - BulkResponse( - took = 3, - errors = false, - items = Chunk( - CreateBulkResponse( - index = "repositories", - id = "123", - version = Some(1), - result = Some("created"), - shards = Some(Shards(total = 1, successful = 1, failed = 0)), - status = Some(201), - error = None - ) - ) - ) - - assertZIO(executorBulk)(equalTo(expectedBulkResponse)) - }, - test("count") { - val executorCount = Executor.execute(ElasticRequest.count(index, matchAll).routing(Routing("routing"))) - - assertZIO(executorCount)(equalTo(2)) - }, - test("create") { - val executorCreate = - Executor - .execute( - ElasticRequest - .create[TestDocument](index = index, doc = doc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorCreate)(equalTo(DocumentId("V4x8q4UB3agN0z75fv5r"))) - }, - test("create with ID") { - val executorCreateDocumentId = - Executor.execute( - ElasticRequest - .create[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorCreateDocumentId)(equalTo(Created)) - }, - test("createIndex") { - val executorCreateIndex = Executor.execute(ElasticRequest.createIndex(index = index)) - - val mapping = - """ - |{ - | "settings": { - | "index": { - | "number_of_shards": 1 - | } - | }, - | "mappings": { - | "_routing": { - | "required": true 
- | }, - | "properties": { - | "id": { - | "type": "keyword" - | } - | } - | } - |} - |""".stripMargin - val executorCreateIndexMapping = - Executor.execute(ElasticRequest.createIndex(index = index, definition = mapping)) - - assertZIO(executorCreateIndex)(equalTo(Created)) && - assertZIO(executorCreateIndexMapping)(equalTo(Created)) - }, - test("deleteById") { - val executorDeleteById = - Executor.execute( - ElasticRequest - .deleteById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorDeleteById)(equalTo(Deleted)) - }, - test("deleteByQuery") { - val executorDeleteByQuery = - Executor.execute( - ElasticRequest.deleteByQuery(index = index, query = matchAll).refreshTrue.routing(Routing("routing")) - ) - - assertZIO(executorDeleteByQuery)(equalTo(Deleted)) - }, - test("deleteIndex") { - val executorDeleteIndex = Executor.execute(ElasticRequest.deleteIndex(index = index)) - - assertZIO(executorDeleteIndex)(equalTo(Deleted)) - }, - test("exists") { - val executorExists = - Executor.execute( - ElasticRequest - .exists(index = index, id = DocumentId("example-id")) - .routing(Routing("routing")) - ) - - assertZIO(executorExists)(isTrue) - }, - test("getById") { - val executorGetById = - Executor - .execute( - ElasticRequest - .getById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) - .routing(Routing("routing")) - ) - .documentAs[TestDocument] - - assertZIO(executorGetById)(isSome(equalTo(doc))) - }, - test("knnSearch") { - val executorSearch = - Executor - .execute( - ElasticRequest - .knnSearch(selectors = index, query = kNN(TestDocument.vectorField, 2, 5, Chunk(-5.0, 9.0, -12.0))) - ) - .documentAs[TestDocument] - assertZIO(executorSearch)(equalTo(Chunk(doc))) - }, - test("refresh") { - val executorRefresh = Executor.execute(ElasticRequest.refresh(selectors = index)) - assertZIO(executorRefresh)(equalTo(true)) - }, - test("search") { - val executorSearch = - Executor - .execute(ElasticRequest.search(selectors = index, query = matchAll)) - .documentAs[TestDocument] - val terms = termsAggregation(name = "aggregation1", field = "name") - val executorSearchWithTerms = - Executor - .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) - .documentAs[TestDocument] - - assertZIO(executorSearch)(equalTo(Chunk(doc))) && assertZIO(executorSearchWithTerms)(equalTo(Chunk(doc))) - }, - test("search + aggregate") { - val terms = termsAggregation(name = "aggregation1", field = "name") - val executorSearchAggregations = - Executor - .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) - .aggregations - - val expectedTermsAggregationResult = - Map( - "aggregation1" -> TermsAggregationResult( - docErrorCount = 0, - sumOtherDocCount = 0, - buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) - ) - ) - - assertZIO(executorSearchAggregations)(equalTo(expectedTermsAggregationResult)) - }, - test("update") { - val executorUpdate = - Executor.execute( - ElasticRequest - .update[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) - .orCreate(doc = secondDoc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorUpdate)(equalTo(UpdateOutcome.Updated)) - }, - test("updateAllByQuery") { - val executorUpdateAllByQuery = - Executor.execute( - ElasticRequest - .updateAllByQuery(index = index, script = Script("ctx._source['intField']++")) - .conflicts(Proceed) - 
.routing(Routing("routing")) - .refreshTrue - ) - - val expectedUpdateByQueryResult = - UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) - - assertZIO(executorUpdateAllByQuery)(equalTo(expectedUpdateByQueryResult)) - }, - test("updateByQuery") { - val executorUpdateByQuery = - Executor.execute( - ElasticRequest - .updateByQuery( - index = index, - query = term(field = TestDocument.stringField.keyword, value = "StringField"), - script = Script("ctx._source['intField']++") - ) - .conflicts(Proceed) - .routing(Routing("routing")) - .refreshTrue - ) - - val expectedUpdateByQueryResult = - UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) - - assertZIO(executorUpdateByQuery)(equalTo(expectedUpdateByQueryResult)) - }, - test("updateByScript") { - val executorUpdateByScript = - Executor.execute( - ElasticRequest - .updateByScript( - index = index, - id = DocumentId("V4x8q4UB3agN0z75fv5r"), - script = Script("ctx._source.intField += params['factor']").params("factor" -> 2) - ) - .orCreate(doc = secondDoc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorUpdateByScript)(equalTo(UpdateOutcome.Updated)) - }, - test("upsert") { - val executorUpsert = - Executor.execute( - ElasticRequest - .upsert[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorUpsert)(isUnit) - } - ).provideShared(elasticsearchSttpLayer) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package zio.elasticsearch + +import zio.Chunk +import zio.elasticsearch.ElasticAggregation.termsAggregation +import zio.elasticsearch.ElasticQuery.{kNN, matchAll, term} +import zio.elasticsearch.domain.TestDocument +import zio.elasticsearch.executor.Executor +import zio.elasticsearch.executor.response.{BulkResponse, CreateBulkResponse, Shards} +import zio.elasticsearch.request.CreationOutcome.Created +import zio.elasticsearch.request.DeletionOutcome.Deleted +import zio.elasticsearch.request.UpdateConflicts.Proceed +import zio.elasticsearch.request.UpdateOutcome +import zio.elasticsearch.result.{TermsAggregationBucketResult, TermsAggregationResult, UpdateByQueryResult} +import zio.elasticsearch.script.Script +import zio.test.Assertion._ +import zio.test.{Spec, TestEnvironment, TestResultZIOOps, assertZIO} + +object HttpElasticExecutorSpec extends SttpBackendStubSpec { + def spec: Spec[TestEnvironment, Any] = + suite("HttpElasticExecutor")( + test("aggregation") { + val executorAggregate = + Executor + .execute(ElasticRequest.aggregate(index, termsAggregation(name = "aggregation1", field = "name"))) + .aggregations + + val expectedTermsAggregationResult = + Map( + "aggregation1" -> TermsAggregationResult( + docErrorCount = 0, + sumOtherDocCount = 0, + buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) + ) + ) + + assertZIO(executorAggregate)(equalTo(expectedTermsAggregationResult)) + }, + test("bulk") { + val executorBulk = Executor.execute(ElasticRequest.bulk(ElasticRequest.create(index, doc)).refreshTrue) + + val expectedBulkResponse = + BulkResponse( + took = 3, + errors = false, + items = Chunk( + CreateBulkResponse( + index = "repositories", + id = "123", + version = Some(1), + result = Some("created"), + shards = Some(Shards(total = 1, successful = 1, failed = 0)), + status = Some(201), + error = None + ) + ) + ) + + assertZIO(executorBulk)(equalTo(expectedBulkResponse)) + }, + test("count") { + val executorCount = Executor.execute(ElasticRequest.count(index, matchAll).routing(Routing("routing"))) + + assertZIO(executorCount)(equalTo(2)) + }, + test("create") { + val executorCreate = + Executor + .execute( + ElasticRequest + .create[TestDocument](index = index, doc = doc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorCreate)(equalTo(DocumentId("V4x8q4UB3agN0z75fv5r"))) + }, + test("create with ID") { + val executorCreateDocumentId = + Executor.execute( + ElasticRequest + .create[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorCreateDocumentId)(equalTo(Created)) + }, + test("createIndex") { + val executorCreateIndex = Executor.execute(ElasticRequest.createIndex(index = index)) + + val mapping = + """ + |{ + | "settings": { + | "index": { + | "number_of_shards": 1 + | } + | }, + | "mappings": { + | "_routing": { + | "required": true + | }, + | "properties": { + | "id": { + | "type": "keyword" + | } + | } + | } + |} + |""".stripMargin + val executorCreateIndexMapping = + Executor.execute(ElasticRequest.createIndex(index = index, definition = mapping)) + + assertZIO(executorCreateIndex)(equalTo(Created)) && + assertZIO(executorCreateIndexMapping)(equalTo(Created)) + }, + test("deleteById") { + val executorDeleteById = + Executor.execute( + ElasticRequest + .deleteById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) + .routing(Routing("routing")) + .refreshTrue + ) + + 
assertZIO(executorDeleteById)(equalTo(Deleted)) + }, + test("deleteByQuery") { + val executorDeleteByQuery = + Executor.execute( + ElasticRequest.deleteByQuery(index = index, query = matchAll).refreshTrue.routing(Routing("routing")) + ) + + assertZIO(executorDeleteByQuery)(equalTo(Deleted)) + }, + test("deleteIndex") { + val executorDeleteIndex = Executor.execute(ElasticRequest.deleteIndex(index = index)) + + assertZIO(executorDeleteIndex)(equalTo(Deleted)) + }, + test("exists") { + val executorExists = + Executor.execute( + ElasticRequest + .exists(index = index, id = DocumentId("example-id")) + .routing(Routing("routing")) + ) + + assertZIO(executorExists)(isTrue) + }, + test("getById") { + val executorGetById = + Executor + .execute( + ElasticRequest + .getById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) + .routing(Routing("routing")) + ) + .documentAs[TestDocument] + + assertZIO(executorGetById)(isSome(equalTo(doc))) + }, + test("knnSearch") { + val executorSearch = + Executor + .execute( + ElasticRequest + .knnSearch(selectors = index, query = kNN(TestDocument.vectorField, 2, 5, Chunk(-5.0, 9.0, -12.0))) + ) + .documentAs[TestDocument] + assertZIO(executorSearch)(equalTo(Chunk(doc))) + }, + test("refresh") { + val executorRefresh = Executor.execute(ElasticRequest.refresh(selectors = index)) + assertZIO(executorRefresh)(equalTo(true)) + }, + test("search") { + val executorSearch = + Executor + .execute(ElasticRequest.search(selectors = index, query = matchAll)) + .documentAs[TestDocument] + val terms = termsAggregation(name = "aggregation1", field = "name") + val executorSearchWithTerms = + Executor + .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) + .documentAs[TestDocument] + + assertZIO(executorSearch)(equalTo(Chunk(doc))) && assertZIO(executorSearchWithTerms)(equalTo(Chunk(doc))) + }, + test("search + aggregate") { + val terms = termsAggregation(name = "aggregation1", field = "name") + val executorSearchAggregations = + Executor + .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) + .aggregations + + val expectedTermsAggregationResult = + Map( + "aggregation1" -> TermsAggregationResult( + docErrorCount = 0, + sumOtherDocCount = 0, + buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) + ) + ) + + assertZIO(executorSearchAggregations)(equalTo(expectedTermsAggregationResult)) + }, + test("update") { + val executorUpdate = + Executor.execute( + ElasticRequest + .update[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) + .orCreate(doc = secondDoc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorUpdate)(equalTo(UpdateOutcome.Updated)) + }, + test("updateAllByQuery") { + val executorUpdateAllByQuery = + Executor.execute( + ElasticRequest + .updateAllByQuery(index = index, script = Script("ctx._source['intField']++")) + .conflicts(Proceed) + .routing(Routing("routing")) + .refreshTrue + ) + + val expectedUpdateByQueryResult = + UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) + + assertZIO(executorUpdateAllByQuery)(equalTo(expectedUpdateByQueryResult)) + }, + test("updateByQuery") { + val executorUpdateByQuery = + Executor.execute( + ElasticRequest + .updateByQuery( + index = index, + query = term(field = TestDocument.stringField.keyword, value = "StringField"), + script = Script("ctx._source['intField']++") + ) + .conflicts(Proceed) + 
.routing(Routing("routing")) + .refreshTrue + ) + + val expectedUpdateByQueryResult = + UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) + + assertZIO(executorUpdateByQuery)(equalTo(expectedUpdateByQueryResult)) + }, + test("updateByScript") { + val executorUpdateByScript = + Executor.execute( + ElasticRequest + .updateByScript( + index = index, + id = DocumentId("V4x8q4UB3agN0z75fv5r"), + script = Script("ctx._source.intField += params['factor']").params("factor" -> 2) + ) + .orCreate(doc = secondDoc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorUpdateByScript)(equalTo(UpdateOutcome.Updated)) + }, + test("upsert") { + val executorUpsert = + Executor.execute( + ElasticRequest + .upsert[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorUpsert)(isUnit) + } + ).provideShared(elasticsearchSttpLayer) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala index 895364f25..c2de29d72 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala @@ -1,96 +1,96 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import zio.elasticsearch.utils.unsafeWrap -import zio.prelude.Validation -import zio.test.Assertion.equalTo -import zio.test._ - -object IndexNameSpec extends ZIOSpecDefault { - def spec: Spec[TestEnvironment, Any] = - suite("IndexName")( - suite("constructing")( - test("fail for empty string") { - val name = "" - - assert(IndexName.make(name))(equalTo(Validation.fail(indexNameFailureMessage(name)))) - }, - test("fail for string '.'") { - val invalidName = "." 
- - assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) - }, - test("fail for string containing character '*'") { - check(genString(0, 127), genString(0, 128)) { (part1, part2) => - val invalidName = s"$part1*$part2" - - assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) - } - }, - test("fail for string containing character ':'") { - check(genString(0, 127), genString(0, 128)) { (part1, part2) => - val invalidName = s"$part1:$part2" - - assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) - } - }, - test("fail for string containing upper letter") { - check(genString(0, 127), genString(0, 128)) { (part1, part2) => - val invalidName = s"${part1}A$part2" - - assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) - } - }, - test("fail for string longer than 255 bytes") { - check(genString(256, 300)) { invalidName => - assert(IndexName.make(invalidName))( - equalTo(Validation.fail(indexNameFailureMessage(invalidName))) - ) - } - }, - test("fail for string starting with character '-'") { - check(genString(1, 255)) { name => - val invalidName = s"-$name" - - assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) - } - }, - test("succeed for valid string") { - check(genString(1, 255)) { name => - assert(IndexName.make(name))(equalTo(Validation.succeed(unsafeWrap(name)(IndexName)))) - } - } - ) - ) - - private def indexNameFailureMessage(name: String): String = - s"$name did not satisfy " + - s""" - | - Must be lower case only - | - Cannot include \\, /, *, ?, ", <, >, |, ` `(space character), `,`(comma), #. - | - Cannot include ":"(since 7.0). - | - Cannot be empty - | - Cannot start with -, _, +. - | - Cannot be `.` or `..`. - | - Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster). - | - Names starting with . are deprecated, except for hidden indices and internal indices managed by plugins. - |""".stripMargin - - private def genString(min: Int, max: Int): Gen[Any, String] = - Gen.stringBounded(min, max)(Gen.alphaChar).map(_.toLowerCase) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package zio.elasticsearch + +import zio.elasticsearch.utils.unsafeWrap +import zio.prelude.Validation +import zio.test.Assertion.equalTo +import zio.test._ + +object IndexNameSpec extends ZIOSpecDefault { + def spec: Spec[TestEnvironment, Any] = + suite("IndexName")( + suite("constructing")( + test("fail for empty string") { + val name = "" + + assert(IndexName.make(name))(equalTo(Validation.fail(indexNameFailureMessage(name)))) + }, + test("fail for string '.'") { + val invalidName = "." 
+ + assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) + }, + test("fail for string containing character '*'") { + check(genString(0, 127), genString(0, 128)) { (part1, part2) => + val invalidName = s"$part1*$part2" + + assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) + } + }, + test("fail for string containing character ':'") { + check(genString(0, 127), genString(0, 128)) { (part1, part2) => + val invalidName = s"$part1:$part2" + + assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) + } + }, + test("fail for string containing upper letter") { + check(genString(0, 127), genString(0, 128)) { (part1, part2) => + val invalidName = s"${part1}A$part2" + + assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) + } + }, + test("fail for string longer than 255 bytes") { + check(genString(256, 300)) { invalidName => + assert(IndexName.make(invalidName))( + equalTo(Validation.fail(indexNameFailureMessage(invalidName))) + ) + } + }, + test("fail for string starting with character '-'") { + check(genString(1, 255)) { name => + val invalidName = s"-$name" + + assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName)))) + } + }, + test("succeed for valid string") { + check(genString(1, 255)) { name => + assert(IndexName.make(name))(equalTo(Validation.succeed(unsafeWrap(name)(IndexName)))) + } + } + ) + ) + + private def indexNameFailureMessage(name: String): String = + s"$name did not satisfy " + + s""" + | - Must be lower case only + | - Cannot include \\, /, *, ?, ", <, >, |, ` `(space character), `,`(comma), #. + | - Cannot include ":"(since 7.0). + | - Cannot be empty + | - Cannot start with -, _, +. + | - Cannot be `.` or `..`. + | - Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster). + | - Names starting with . are deprecated, except for hidden indices and internal indices managed by plugins. 
+ |""".stripMargin + + private def genString(min: Int, max: Int): Gen[Any, String] = + Gen.stringBounded(min, max)(Gen.alphaChar).map(_.toLowerCase) +} From bb294325e18063e18cf26f75633c76852933604a Mon Sep 17 00:00:00 2001 From: Marko Krstic Date: Wed, 18 Jun 2025 17:33:59 +0200 Subject: [PATCH 07/15] Add tests in ElasticAggregationSpec --- .../ElasticAggregationSpec.scala | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) diff --git a/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala index 202950431..4130e6e3b 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala @@ -150,6 +150,32 @@ object ElasticAggregationSpec extends ZIOSpecDefault { equalTo(ExtendedStats(name = "aggregation", field = "intField", missing = Some(20.0), sigma = Some(3.0))) ) }, + test("ipRange") { + val aggregation = + ipRangeAggregation( + name = "ip_range_agg", + field = "ipField", + ranges = Chunk( + IpRange.IpRangeBound(to = Some("10.0.0.5")), + IpRange.IpRangeBound(from = Some("10.0.0.5")) + ) + ) + + assert(aggregation)( + equalTo( + IpRange( + name = "ip_range_agg", + field = "ipField", + ranges = Chunk( + IpRange.IpRangeBound(to = Some("10.0.0.5")), + IpRange.IpRangeBound(from = Some("10.0.0.5")) + ), + keyed = None, + subAggregations = None + ) + ) + ) + }, test("filter") { val query = term(TestDocument.stringField, "test") val aggregation = filterAggregation("aggregation", query) @@ -967,6 +993,73 @@ object ElasticAggregationSpec extends ZIOSpecDefault { assert(aggregationWithSubAggregation.toJson)(equalTo(expectedWithSubAggregation.toJson)) && assert(aggregationWithMultipleSubAggregations.toJson)(equalTo(expectedWithMultipleSubAggregations.toJson)) }, + test("ip_range aggregation with from/to ") { + val agg = IpRange( + name = "ip_range_agg", + field = "ip", + ranges = Chunk( + IpRange.IpRangeBound(to = Some("10.0.0.5")), + IpRange.IpRangeBound(from = Some("10.0.0.5")) + ), + keyed = None, + subAggregations = None + ) + + val expectedJson = + """ + |{ + | "ip_range_agg": { + | "ip_range": { + | "field": "ip", + | "ranges": [ + | { + | "to": "10.0.0.5" + | }, + | { + | "from": "10.0.0.5" + | } + | ] + | } + | } + |} + |""".stripMargin + + assert(agg.toJson)(equalTo(expectedJson.toJson)) + }, + test("ip_range aggregation with CIDR masks and keyed = true") { + val agg = IpRange( + name = "ip_range_agg", + field = "ip", + ranges = Chunk( + IpRange.IpRangeBound(mask = Some("10.0.0.0/25")), + IpRange.IpRangeBound(mask = Some("10.0.0.127/25")) + ), + keyed = Some(true), + subAggregations = None + ) + + val expectedJson = + """ + |{ + | "ip_range_agg": { + | "ip_range": { + | "field": "ip", + | "ranges": [ + | { + | "mask": "10.0.0.0/25" + | }, + | { + | "mask": "10.0.0.127/25" + | } + | ], + | "keyed": true + | } + | } + |} + |""".stripMargin + + assert(agg.toJson)(equalTo(expectedJson.toJson)) + }, test("max") { val aggregation = maxAggregation("aggregation", "testField") val aggregationTs = maxAggregation("aggregation", TestDocument.intField) From ccd21f7e3060313398e604fd4fdc5633a11f8bc6 Mon Sep 17 00:00:00 2001 From: Marko Krstic Date: Wed, 18 Jun 2025 17:40:10 +0200 Subject: [PATCH 08/15] Remove strange chars. 
--- .../zio/elasticsearch/IntegrationSpec.scala | 226 +++++++++--------- .../zio/elasticsearch/FieldDSLSpec.scala | 174 +++++++------- .../zio/elasticsearch/IndexNameSpec.scala | 192 +++++++-------- 3 files changed, 296 insertions(+), 296 deletions(-) diff --git a/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala index 62fa4dd6c..a67d3ea25 100644 --- a/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala +++ b/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala @@ -1,113 +1,113 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import sttp.client4.httpclient.zio.HttpClientZioBackend -import zio._ -import zio.elasticsearch.ElasticQuery.matchAll -import zio.elasticsearch.data.GeoPoint -import zio.elasticsearch.domain._ -import zio.elasticsearch.executor.Executor -import zio.elasticsearch.utils.unsafeWrap -import zio.test.Assertion.{containsString, hasMessage} -import zio.test.CheckVariants.CheckN -import zio.test.TestAspect.beforeAll -import zio.test.{Assertion, Gen, TestAspect, ZIOSpecDefault, checkN} - -import java.time.LocalDate - -trait IntegrationSpec extends ZIOSpecDefault { - - val elasticsearchLayer: TaskLayer[Executor] = HttpClientZioBackend.layer() >>> ElasticExecutor.local - - val index: IndexName = IndexName("users") - - val deleteByQueryIndex: IndexName = IndexName("delete-by-query-index") - - val firstSearchIndex: IndexName = IndexName("search-index-1") - - val secondSearchIndex: IndexName = IndexName("search-index-2") - - val createIndexTestName: IndexName = IndexName("create-index-test-name") - - val firstCountIndex: IndexName = IndexName("count-index-1") - - val secondCountIndex: IndexName = IndexName("count-index-2") - - val updateByQueryIndex: IndexName = IndexName("update-by-query-index") - - val geoDistanceIndex: IndexName = IndexName("geo-distance-index") - - val refreshFailIndex: IndexName = IndexName("refresh-fail") - - val IndexPatternAll: IndexPattern = IndexPattern("_all") - - val geoPolygonIndex: IndexName = IndexName("geo-polygon-index") - - val prepareElasticsearchIndexForTests: TestAspect[Nothing, Any, Throwable, Any] = beforeAll((for { - _ <- Executor.execute(ElasticRequest.createIndex(index)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(index, matchAll).refreshTrue) - } yield ()).provide(elasticsearchLayer)) - - def genIndexName: Gen[Any, IndexName] = - Gen.stringBounded(10, 40)(Gen.alphaChar).map(name => unsafeWrap(name.toLowerCase)(IndexName)) - - def genDocumentId: Gen[Any, DocumentId] = - Gen.stringBounded(10, 40)(Gen.alphaNumericChar).map(DocumentId(_)) - - def genGeoPoint: Gen[Any, GeoPoint] = - for { - latitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) - longitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) - } yield GeoPoint(latitude, 
longitude) - - def genTestDocument: Gen[Any, TestDocument] = for { - stringField <- Gen.stringBounded(5, 10)(Gen.alphaChar) - dateField <- Gen.localDate(LocalDate.parse("2010-12-02"), LocalDate.parse("2022-12-05")) - subDocumentList <- Gen.listOfBounded(1, 3)(genTestSubDocument) - intField <- Gen.int(1, 2000) - doubleField <- Gen.double(100, 2000) - booleanField <- Gen.boolean - geoPointField <- genGeoPoint - vectorField <- Gen.listOfN(5)(Gen.int(-10, 10)) - } yield TestDocument( - stringField = stringField, - dateField = dateField, - subDocumentList = subDocumentList, - intField = intField, - doubleField = doubleField, - booleanField = booleanField, - geoPointField = geoPointField, - vectorField = vectorField - ) - - def genTestSubDocument: Gen[Any, TestSubDocument] = for { - stringField1 <- Gen.stringBounded(5, 10)(Gen.alphaChar) - stringField2 <- Gen.stringBounded(5, 10)(Gen.alphaChar) - longField <- Gen.long(1, 75) - intField <- Gen.int(1, 200) - } yield TestSubDocument( - stringField = stringField1, - nestedField = TestNestedField(stringField2, longField), - intField = intField, - intFieldList = Nil - ) - - def checkOnce: CheckN = checkN(1) - - def assertException(substring: String): Assertion[Throwable] = hasMessage(containsString(substring)) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package zio.elasticsearch + +import sttp.client4.httpclient.zio.HttpClientZioBackend +import zio._ +import zio.elasticsearch.ElasticQuery.matchAll +import zio.elasticsearch.data.GeoPoint +import zio.elasticsearch.domain._ +import zio.elasticsearch.executor.Executor +import zio.elasticsearch.utils.unsafeWrap +import zio.test.Assertion.{containsString, hasMessage} +import zio.test.CheckVariants.CheckN +import zio.test.TestAspect.beforeAll +import zio.test.{Assertion, Gen, TestAspect, ZIOSpecDefault, checkN} + +import java.time.LocalDate + +trait IntegrationSpec extends ZIOSpecDefault { + + val elasticsearchLayer: TaskLayer[Executor] = HttpClientZioBackend.layer() >>> ElasticExecutor.local + + val index: IndexName = IndexName("users") + + val deleteByQueryIndex: IndexName = IndexName("delete-by-query-index") + + val firstSearchIndex: IndexName = IndexName("search-index-1") + + val secondSearchIndex: IndexName = IndexName("search-index-2") + + val createIndexTestName: IndexName = IndexName("create-index-test-name") + + val firstCountIndex: IndexName = IndexName("count-index-1") + + val secondCountIndex: IndexName = IndexName("count-index-2") + + val updateByQueryIndex: IndexName = IndexName("update-by-query-index") + + val geoDistanceIndex: IndexName = IndexName("geo-distance-index") + + val refreshFailIndex: IndexName = IndexName("refresh-fail") + + val IndexPatternAll: IndexPattern = IndexPattern("_all") + + val geoPolygonIndex: IndexName = IndexName("geo-polygon-index") + + val prepareElasticsearchIndexForTests: TestAspect[Nothing, Any, Throwable, Any] = beforeAll((for { + _ <- Executor.execute(ElasticRequest.createIndex(index)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(index, matchAll).refreshTrue) + } yield ()).provide(elasticsearchLayer)) + + def genIndexName: Gen[Any, IndexName] = + Gen.stringBounded(10, 40)(Gen.alphaChar).map(name => unsafeWrap(name.toLowerCase)(IndexName)) + + def genDocumentId: Gen[Any, DocumentId] = + Gen.stringBounded(10, 40)(Gen.alphaNumericChar).map(DocumentId(_)) + + def genGeoPoint: Gen[Any, GeoPoint] = + for { + latitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) + longitude <- Gen.bigDecimal(10, 90).map(_.setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble) + } yield GeoPoint(latitude, longitude) + + def genTestDocument: Gen[Any, TestDocument] = for { + stringField <- Gen.stringBounded(5, 10)(Gen.alphaChar) + dateField <- Gen.localDate(LocalDate.parse("2010-12-02"), LocalDate.parse("2022-12-05")) + subDocumentList <- Gen.listOfBounded(1, 3)(genTestSubDocument) + intField <- Gen.int(1, 2000) + doubleField <- Gen.double(100, 2000) + booleanField <- Gen.boolean + geoPointField <- genGeoPoint + vectorField <- Gen.listOfN(5)(Gen.int(-10, 10)) + } yield TestDocument( + stringField = stringField, + dateField = dateField, + subDocumentList = subDocumentList, + intField = intField, + doubleField = doubleField, + booleanField = booleanField, + geoPointField = geoPointField, + vectorField = vectorField + ) + + def genTestSubDocument: Gen[Any, TestSubDocument] = for { + stringField1 <- Gen.stringBounded(5, 10)(Gen.alphaChar) + stringField2 <- Gen.stringBounded(5, 10)(Gen.alphaChar) + longField <- Gen.long(1, 75) + intField <- Gen.int(1, 200) + } yield TestSubDocument( + stringField = stringField1, + nestedField = TestNestedField(stringField2, longField), + intField = intField, + intFieldList = Nil + ) + + def checkOnce: CheckN = checkN(1) + + def assertException(substring: String): Assertion[Throwable] = 
hasMessage(containsString(substring)) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala index 04d3607c5..8496ed9f6 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala @@ -1,87 +1,87 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import zio.elasticsearch.domain.{TestNestedField, TestSubDocument} -import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assertTrue} - -object FieldDSLSpec extends ZIOSpecDefault { - def spec: Spec[TestEnvironment, Any] = - suite("Field DSL")( - suite("constructing")( - test("field") { - val encodeName = "name" - - val encodeNestedNumber = "number" - val encodeNestedAddress = "address" - val encodeNestedExpectedVal = "address.number" - - val encodeSuffixName = "name" - val encodeSuffixKeyword = "keyword" - val encodeSuffixExpectedVal = "name.keyword" - - val encodeSingleFieldSuffixName = "name" - val encodeSingleFieldSuffixMultiField = "multi_field" - val encodeSingleFieldSuffixKeyword = "keyword" - val encodeSingleFieldSuffixExpectedVal = "name.multi_field.keyword" - - val encodeSingleFieldKeywordSuffixName = "name" - val encodeSingleFieldKeywordSuffixExpectedVal = "name.keyword" - - val encodeSingleFieldRawSuffixKeyword = "name" - val encodeSingleFieldRawSuffixExpectedVal = "name.raw" - - assertTrue(Field(None, encodeName).toString == encodeName) && assertTrue( - Field(Some(Field(None, encodeNestedAddress)), encodeNestedNumber).toString == encodeNestedExpectedVal - ) && assertTrue( - Field[Any, String](None, encodeSuffixName).suffix(encodeSuffixKeyword).toString == encodeSuffixExpectedVal - ) && assertTrue( - Field[Any, String](None, encodeSingleFieldSuffixName) - .suffix(encodeSingleFieldSuffixMultiField) - .suffix(encodeSingleFieldSuffixKeyword) - .toString == encodeSingleFieldSuffixExpectedVal - ) && assertTrue( - Field[Any, String]( - None, - encodeSingleFieldKeywordSuffixName - ).keyword.toString == encodeSingleFieldKeywordSuffixExpectedVal - ) && assertTrue( - Field[Any, String]( - None, - encodeSingleFieldRawSuffixKeyword - ).raw.toString == encodeSingleFieldRawSuffixExpectedVal - ) - }, - test("path") { - val singleFieldExpectedVal = "stringField" - - val nestedFieldAccessorsExpectedVal = "nestedField.longField" - - val singleFieldSuffixAccessorKeyword = "keyword" - val singleFieldSuffixAccessorExpectedVal = "stringField.keyword" - - assertTrue(TestSubDocument.stringField.toString == singleFieldExpectedVal) && assertTrue( - (TestSubDocument.nestedField / TestNestedField.longField).toString == nestedFieldAccessorsExpectedVal - ) && assertTrue( - TestSubDocument.stringField - .suffix(singleFieldSuffixAccessorKeyword) - .toString == singleFieldSuffixAccessorExpectedVal - ) - } - ) - ) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package zio.elasticsearch + +import zio.elasticsearch.domain.{TestNestedField, TestSubDocument} +import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assertTrue} + +object FieldDSLSpec extends ZIOSpecDefault { + def spec: Spec[TestEnvironment, Any] = + suite("Field DSL")( + suite("constructing")( + test("field") { + val encodeName = "name" + + val encodeNestedNumber = "number" + val encodeNestedAddress = "address" + val encodeNestedExpectedVal = "address.number" + + val encodeSuffixName = "name" + val encodeSuffixKeyword = "keyword" + val encodeSuffixExpectedVal = "name.keyword" + + val encodeSingleFieldSuffixName = "name" + val encodeSingleFieldSuffixMultiField = "multi_field" + val encodeSingleFieldSuffixKeyword = "keyword" + val encodeSingleFieldSuffixExpectedVal = "name.multi_field.keyword" + + val encodeSingleFieldKeywordSuffixName = "name" + val encodeSingleFieldKeywordSuffixExpectedVal = "name.keyword" + + val encodeSingleFieldRawSuffixKeyword = "name" + val encodeSingleFieldRawSuffixExpectedVal = "name.raw" + + assertTrue(Field(None, encodeName).toString == encodeName) && assertTrue( + Field(Some(Field(None, encodeNestedAddress)), encodeNestedNumber).toString == encodeNestedExpectedVal + ) && assertTrue( + Field[Any, String](None, encodeSuffixName).suffix(encodeSuffixKeyword).toString == encodeSuffixExpectedVal + ) && assertTrue( + Field[Any, String](None, encodeSingleFieldSuffixName) + .suffix(encodeSingleFieldSuffixMultiField) + .suffix(encodeSingleFieldSuffixKeyword) + .toString == encodeSingleFieldSuffixExpectedVal + ) && assertTrue( + Field[Any, String]( + None, + encodeSingleFieldKeywordSuffixName + ).keyword.toString == encodeSingleFieldKeywordSuffixExpectedVal + ) && assertTrue( + Field[Any, String]( + None, + encodeSingleFieldRawSuffixKeyword + ).raw.toString == encodeSingleFieldRawSuffixExpectedVal + ) + }, + test("path") { + val singleFieldExpectedVal = "stringField" + + val nestedFieldAccessorsExpectedVal = "nestedField.longField" + + val singleFieldSuffixAccessorKeyword = "keyword" + val singleFieldSuffixAccessorExpectedVal = "stringField.keyword" + + assertTrue(TestSubDocument.stringField.toString == singleFieldExpectedVal) && assertTrue( + (TestSubDocument.nestedField / TestNestedField.longField).toString == nestedFieldAccessorsExpectedVal + ) && assertTrue( + TestSubDocument.stringField + .suffix(singleFieldSuffixAccessorKeyword) + .toString == singleFieldSuffixAccessorExpectedVal + ) + } + ) + ) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala index c2de29d72..cdecc741f 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala @@ -1,96 +1,96 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package zio.elasticsearch
-
-import zio.elasticsearch.utils.unsafeWrap
-import zio.prelude.Validation
-import zio.test.Assertion.equalTo
-import zio.test._
-
-object IndexNameSpec extends ZIOSpecDefault {
-  def spec: Spec[TestEnvironment, Any] =
-    suite("IndexName")(
-      suite("constructing")(
-        test("fail for empty string") {
-          val name = ""
-
-          assert(IndexName.make(name))(equalTo(Validation.fail(indexNameFailureMessage(name))))
-        },
-        test("fail for string '.'") {
-          val invalidName = "."
-
-          assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
-        },
-        test("fail for string containing character '*'") {
-          check(genString(0, 127), genString(0, 128)) { (part1, part2) =>
-            val invalidName = s"$part1*$part2"
-
-            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
-          }
-        },
-        test("fail for string containing character ':'") {
-          check(genString(0, 127), genString(0, 128)) { (part1, part2) =>
-            val invalidName = s"$part1:$part2"
-
-            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
-          }
-        },
-        test("fail for string containing upper letter") {
-          check(genString(0, 127), genString(0, 128)) { (part1, part2) =>
-            val invalidName = s"${part1}A$part2"
-
-            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
-          }
-        },
-        test("fail for string longer than 255 bytes") {
-          check(genString(256, 300)) { invalidName =>
-            assert(IndexName.make(invalidName))(
-              equalTo(Validation.fail(indexNameFailureMessage(invalidName)))
-            )
-          }
-        },
-        test("fail for string starting with character '-'") {
-          check(genString(1, 255)) { name =>
-            val invalidName = s"-$name"
-
-            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
-          }
-        },
-        test("succeed for valid string") {
-          check(genString(1, 255)) { name =>
-            assert(IndexName.make(name))(equalTo(Validation.succeed(unsafeWrap(name)(IndexName))))
-          }
-        }
-      )
-    )
-
-  private def indexNameFailureMessage(name: String): String =
-    s"$name did not satisfy " +
-      s"""
-         | - Must be lower case only
-         | - Cannot include \\, /, *, ?, ", <, >, |, ` `(space character), `,`(comma), #.
-         | - Cannot include ":"(since 7.0).
-         | - Cannot be empty
-         | - Cannot start with -, _, +.
-         | - Cannot be `.` or `..`.
-         | - Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster).
-         | - Names starting with . are deprecated, except for hidden indices and internal indices managed by plugins.
-         |""".stripMargin
-
-  private def genString(min: Int, max: Int): Gen[Any, String] =
-    Gen.stringBounded(min, max)(Gen.alphaChar).map(_.toLowerCase)
-}
+/*
+ * Copyright 2022 LambdaWorks
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package zio.elasticsearch
+
+import zio.elasticsearch.utils.unsafeWrap
+import zio.prelude.Validation
+import zio.test.Assertion.equalTo
+import zio.test._
+
+object IndexNameSpec extends ZIOSpecDefault {
+  def spec: Spec[TestEnvironment, Any] =
+    suite("IndexName")(
+      suite("constructing")(
+        test("fail for empty string") {
+          val name = ""
+
+          assert(IndexName.make(name))(equalTo(Validation.fail(indexNameFailureMessage(name))))
+        },
+        test("fail for string '.'") {
+          val invalidName = "."
+
+          assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
+        },
+        test("fail for string containing character '*'") {
+          check(genString(0, 127), genString(0, 128)) { (part1, part2) =>
+            val invalidName = s"$part1*$part2"
+
+            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
+          }
+        },
+        test("fail for string containing character ':'") {
+          check(genString(0, 127), genString(0, 128)) { (part1, part2) =>
+            val invalidName = s"$part1:$part2"
+
+            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
+          }
+        },
+        test("fail for string containing upper letter") {
+          check(genString(0, 127), genString(0, 128)) { (part1, part2) =>
+            val invalidName = s"${part1}A$part2"
+
+            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
+          }
+        },
+        test("fail for string longer than 255 bytes") {
+          check(genString(256, 300)) { invalidName =>
+            assert(IndexName.make(invalidName))(
+              equalTo(Validation.fail(indexNameFailureMessage(invalidName)))
+            )
+          }
+        },
+        test("fail for string starting with character '-'") {
+          check(genString(1, 255)) { name =>
+            val invalidName = s"-$name"
+
+            assert(IndexName.make(invalidName))(equalTo(Validation.fail(indexNameFailureMessage(invalidName))))
+          }
+        },
+        test("succeed for valid string") {
+          check(genString(1, 255)) { name =>
+            assert(IndexName.make(name))(equalTo(Validation.succeed(unsafeWrap(name)(IndexName))))
+          }
+        }
+      )
+    )
+
+  private def indexNameFailureMessage(name: String): String =
+    s"$name did not satisfy " +
+      s"""
+         | - Must be lower case only
+         | - Cannot include \\, /, *, ?, ", <, >, |, ` `(space character), `,`(comma), #.
+         | - Cannot include ":"(since 7.0).
+         | - Cannot be empty
+         | - Cannot start with -, _, +.
+         | - Cannot be `.` or `..`.
+         | - Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster).
+         | - Names starting with . are deprecated, except for hidden indices and internal indices managed by plugins.
+         |""".stripMargin
+
+  private def genString(min: Int, max: Int): Gen[Any, String] =
+    Gen.stringBounded(min, max)(Gen.alphaChar).map(_.toLowerCase)
+}

From ce1129a9c3abd9c44b420b31ad47efd0d4751708 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Thu, 19 Jun 2025 14:00:25 +0200
Subject: [PATCH 09/15] Fix copyright header spacing and use LF instead of CRLF.
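
The `* text=auto eol=lf` rule added to .gitattributes below asks Git to
treat everything it detects as text uniformly: contents are normalized to
LF in the repository and checked out with LF in the working tree,
regardless of each contributor's local core.autocrlf setting. As a rough
sketch of how an existing clone picks up the new rule (standard Git
commands, shown here only as an illustrative note, not as part of the
patch itself):

    git add --renormalize .   # re-apply the new line-ending rules to all tracked files
    git diff --cached --stat  # files that were stored with CRLF now show as modified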
---
 .gitattributes                                             | 1 +
 .../src/test/scala/zio/elasticsearch/IntegrationSpec.scala | 2 +-
 .../src/test/scala/zio/elasticsearch/FieldDSLSpec.scala    | 4 ++--
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/.gitattributes b/.gitattributes
index 476390ebf..a45fa8234 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1 +1,2 @@
+* text=auto eol=lf
 sbt linguist-vendored
diff --git a/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala
index a67d3ea25..2fa580a6f 100644
--- a/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala
+++ b/modules/integration/src/test/scala/zio/elasticsearch/IntegrationSpec.scala
@@ -6,7 +6,7 @@
  * You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala
index 8496ed9f6..a863ea7b9 100644
--- a/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala
+++ b/modules/library/src/test/scala/zio/elasticsearch/FieldDSLSpec.scala
@@ -4,9 +4,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

From 9534345c5d0b137300b91d7c5288f41dcc45a28c Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Thu, 19 Jun 2025 14:12:21 +0200
Subject: [PATCH 10/15] Fix copyright header spacing and remove strange
 characters.

---
 .../zio/elasticsearch/ElasticQuerySpec.scala  | 9356 ++++++++---------
 .../HttpElasticExecutorSpec.scala             |  596 +-
 .../zio/elasticsearch/IndexNameSpec.scala     |    2 +-
 3 files changed, 4977 insertions(+), 4977 deletions(-)

diff --git a/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala b/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala
index d01867e50..da509a1e2 100644
--- a/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala
+++ b/modules/library/src/test/scala/zio/elasticsearch/ElasticQuerySpec.scala
@@ -1,4678 +1,4678 @@
-/*
- * Copyright 2022 LambdaWorks
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -package zio.elasticsearch - -import zio.Chunk -import zio.elasticsearch.ElasticHighlight.highlight -import zio.elasticsearch.ElasticQuery.{script => _, _} -import zio.elasticsearch.data.GeoPoint -import zio.elasticsearch.domain._ -import zio.elasticsearch.query.DistanceType.Plane -import zio.elasticsearch.query.DistanceUnit.Kilometers -import zio.elasticsearch.query.FunctionScoreFunction._ -import zio.elasticsearch.query.MultiMatchType._ -import zio.elasticsearch.query.MultiValueMode.Max -import zio.elasticsearch.query.ValidationMethod.IgnoreMalformed -import zio.elasticsearch.query._ -import zio.elasticsearch.script.{Painless, Script} -import zio.elasticsearch.utils._ -import zio.test.Assertion.equalTo -import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assert} - -import java.time.LocalDate - -object ElasticQuerySpec extends ZIOSpecDefault { - def spec: Spec[TestEnvironment, Any] = - suite("ElasticQuery")( - suite("constructing")( - suite("bool")( - test("filter") { - val query = filter(matches(TestDocument.stringField, "test"), matches(field = "testField", "test field")) - val queryWithBoost = - filter(matches(TestDocument.stringField, "test"), matches(TestDocument.intField, 22)) - .boost(10.21) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "intField", value = 22) - ), - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk.empty, - boost = Some(10.21), - minimumShouldMatch = None - ) - ) - ) - }, - test("must") { - val query = must(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - must(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22) - ), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = Some(10.21), - minimumShouldMatch = None - ) - ) - ) - }, - test("mustNot") { - val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) - .boost(10.21) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22) - ), - should = Chunk.empty, - boost = Some(10.21), - 
minimumShouldMatch = None - ) - ) - ) - }, - test("should") { - val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) - val queryWithMinimumShouldMatch = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).minimumShouldMatch(2) - val queryWithAllParams = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).boost(3.14).minimumShouldMatch(2) - - assert(query)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField", value = "test"), - Match(field = "testField", value = "test field") - ), - boost = None, - minimumShouldMatch = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22) - ), - boost = Some(10.21), - minimumShouldMatch = None - ) - ) - ) && assert(queryWithMinimumShouldMatch)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22), - Exists(field = "booleanField", boost = None) - ), - boost = None, - minimumShouldMatch = Some(2) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk.empty, - mustNot = Chunk.empty, - should = Chunk( - Match(field = "stringField.keyword", value = "test"), - Match(field = "intField", value = 22), - Exists(field = "booleanField", boost = None) - ), - boost = Some(3.14), - minimumShouldMatch = Some(2) - ) - ) - ) - }, - test("filter + must + mustNot + should") { - val query1 = - filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) - val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) - .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) - val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) - .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) - .mustNot(matches(TestDocument.intField, 50)) - val queryWithBoost = query1.boost(3.14) - val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) - val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) - - assert(query1)( - equalTo( - Bool[TestDocument]( - filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), - must = Chunk(Match(field = "booleanField", value = true)), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(query2)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk( - Match(field = "doubleField", value = 3.14), - Match(field = "testField", value = true), - Exists(field = "anotherTestField", boost = None) - ), - should = Chunk.empty, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(query3)( - equalTo( - Bool[TestDocument]( - filter 
= Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk(Match(field = "intField", value = 50)), - should = Chunk( - Range( - field = "intField", - lower = GreaterThan(1), - upper = LessThanOrEqualTo(100), - boost = None, - format = None - ), - Match(field = "stringField", value = "test") - ), - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - Bool[TestDocument]( - filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), - must = Chunk(Match(field = "booleanField", value = true)), - mustNot = Chunk.empty, - should = Chunk.empty, - boost = Some(3.14), - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithMinimumShouldMatch)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk( - Match(field = "doubleField", value = 3.14), - Match(field = "testField", value = true), - Exists(field = "anotherTestField", boost = None) - ), - should = Chunk.empty, - boost = None, - minimumShouldMatch = Some(2) - ) - ) - ) && - assert(queryWithAllParams)( - equalTo( - Bool[TestDocument]( - filter = Chunk.empty, - must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)), - mustNot = Chunk(Match(field = "intField", value = 50)), - should = Chunk( - Range( - field = "intField", - lower = GreaterThan(1), - upper = LessThanOrEqualTo(100), - boost = None, - format = None - ), - Match(field = "stringField", value = "test") - ), - boost = Some(3.14), - minimumShouldMatch = Some(3) - ) - ) - ) - } - ), - test("boosting") { - val query = boosting(0.5f, exists("testField"), terms("booleanField", true, false)) - val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) - - assert(query)( - equalTo( - Boosting[Any]( - negativeBoost = 0.5f, - negativeQuery = exists("testField"), - positiveQuery = terms("booleanField", true, false) - ) - ) - ) && assert(queryTs)( - equalTo( - Boosting[TestDocument]( - negativeBoost = 0.5f, - negativeQuery = exists(TestDocument.stringField), - positiveQuery = terms(TestDocument.booleanField, true, false) - ) - ) - ) - }, - test("constantScore") { - val query = constantScore(terms("stringField", "a", "b", "c")) - val queryTs = constantScore(terms(TestDocument.stringField, "a", "b", "c")) - val queryWithBoost = constantScore(terms(TestDocument.stringField, "a", "b", "c")).boost(2.2) - - assert(query)( - equalTo( - ConstantScore[Any]( - Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), - boost = None - ) - ) - ) && - assert(queryTs)( - equalTo( - ConstantScore[TestDocument]( - Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - ConstantScore[TestDocument]( - Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None), - boost = Some(2.2) - ) - ) - ) - }, - test("contains") { - val query = contains("testField", "test") - val queryTs = contains(TestDocument.stringField, "test") - val queryWithSuffix = contains(TestDocument.stringField.raw, "test") - val queryWithBoost = contains(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = contains(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(query)( 
- equalTo(Wildcard[Any](field = "testField", value = "*test*", boost = None, caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "*test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Wildcard[TestDocument](field = "stringField.raw", value = "*test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "*test*", - boost = Some(10.21), - caseInsensitive = None - ) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "*test*", - boost = None, - caseInsensitive = Some(true) - ) - ) - ) && - assert(queryAllParams)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "*test*", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - }, - test("disjunctionMax") { - val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) - val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) - val queryWithTieBreaker = disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) - - assert(query)( - equalTo( - DisjunctionMax[Any]( - queries = - Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), - tieBreaker = None - ) - ) - ) && - assert(queryTs)( - equalTo( - DisjunctionMax[TestDocument]( - queries = - Chunk(Exists[Any](field = "stringField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), - tieBreaker = None - ) - ) - ) && - assert(queryWithTieBreaker)( - equalTo( - DisjunctionMax[Any]( - queries = - Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))), - tieBreaker = Some(0.5f) - ) - ) - ) - }, - test("exists") { - val query = exists("testField") - val queryTs = exists(TestDocument.intField) - val queryWithBoost = exists(TestDocument.intField).boost(3) - - assert(query)(equalTo(Exists[Any](field = "testField", boost = None))) && - assert(queryTs)(equalTo(Exists[TestDocument](field = "intField", boost = None))) && - assert(queryWithBoost)(equalTo(Exists[TestDocument](field = "intField", boost = Some(3)))) - - }, - test("functionScore") { - val scriptScore = scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")) - val weight = weightFunction(10.0) - val randomScore = randomScoreFunction() - val fieldValue = fieldValueFactor(TestDocument.stringField) - val decay = expDecayFunction("field", origin = "11, 12", scale = "2km") - val typedDecay = expDecayFunction(TestDocument.intField, origin = "11,12", scale = "2km") - - val fullQuery: FunctionScoreQuery[TestDocument] = functionScore(scriptScore, weight, randomScore) - .withFunctions(decay) - .withFunctions(fieldValue) - .boost(2.0) - .boostMode(FunctionScoreBoostMode.Avg) - .maxBoost(42) - .minScore(32) - .query(matches("stringField", "value")) - .scoreMode(FunctionScoreScoreMode.Min) - - val queryWithType: FunctionScoreQuery[TestDocument] = - functionScore(fieldValue).query(matches(TestDocument.stringField, "value")) - val queryTypeShrink: FunctionScoreQuery[TestDocument] = - functionScore(scriptScore).query(matches(TestDocument.stringField, "value")) - val queryWithoutTypeShrink: FunctionScoreQuery[Any] = - functionScore(scriptScore).query(matches("stringField", "value")) - val queryWithNewAnyQuery: FunctionScoreQuery[TestDocument] = - functionScore(fieldValue).query(matches("stringField", "value")) - - 
val anyQueryWithNewTypedFunction = functionScore(scriptScore).withFunctions(fieldValue) - val anyQueryWithNewAnyFunction = functionScore(scriptScore).withFunctions(weight) - val typedQueryWithNewTypedFunction = functionScore(fieldValue).withFunctions(typedDecay) - val typedQueryWithNewAnyFunction = functionScore(fieldValue).withFunctions(weight) - - assert(fullQuery)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk( - scriptScore, - weight, - randomScore, - decay, - fieldValue - ), - boost = Some(2.0), - boostMode = Some(FunctionScoreBoostMode.Avg), - maxBoost = Some(42.0), - minScore = Some(32.0), - query = Some(Match("stringField", "value")), - scoreMode = Some(FunctionScoreScoreMode.Min) - ) - ) - ) && - assert(queryTypeShrink)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(scriptScore), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(queryWithType)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(queryWithoutTypeShrink)( - equalTo( - FunctionScore[Any]( - functionScoreFunctions = Chunk(scriptScore), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(queryWithNewAnyQuery)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = Some(Match("stringField", "value")), - scoreMode = None - ) - ) - ) && - assert(anyQueryWithNewTypedFunction)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(scriptScore, fieldValue), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) && - assert(anyQueryWithNewAnyFunction)( - equalTo( - FunctionScore[Any]( - functionScoreFunctions = Chunk(scriptScore, weight), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) && - assert(typedQueryWithNewTypedFunction)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue, typedDecay), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) && - assert(typedQueryWithNewAnyFunction)( - equalTo( - FunctionScore[TestDocument]( - functionScoreFunctions = Chunk(fieldValue, weight), - boost = None, - boostMode = None, - maxBoost = None, - minScore = None, - query = None, - scoreMode = None - ) - ) - ) - }, - test("fuzzy") { - val query = fuzzy("stringField", "test") - val queryTs = fuzzy(TestDocument.stringField, "test") - val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") - val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) - val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) - val queryWithAllParameters = - fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) - val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") - - assert(query)( - equalTo( - Fuzzy[Any]( - field = "stringField", - value = "test", - fuzziness = None, - 
maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryTs)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = None, - maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryWithFuzzinessAuto)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = Some("AUTO"), - maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryWithMaxExpansions)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = None, - maxExpansions = Some(50), - prefixLength = None - ) - ) - ) && - assert(queryWithPrefixLength)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = None, - maxExpansions = None, - prefixLength = Some(3) - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField.raw", - value = "test", - fuzziness = None, - maxExpansions = None, - prefixLength = None - ) - ) - ) && - assert(queryWithAllParameters)( - equalTo( - Fuzzy[TestDocument]( - field = "stringField", - value = "test", - fuzziness = Some("AUTO"), - maxExpansions = Some(50), - prefixLength = Some(3) - ) - ) - ) - }, - test("geoDistance") { - val queryWithHash = - geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) - val queryWithPoint = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - val queryWithDistanceType = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) - val queryWithName = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") - val queryWithValidationMethod = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - .validationMethod(IgnoreMalformed) - .distanceType(Plane) - .name("name") - - assert(queryWithHash)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "drm3btev3e86", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = None, - validationMethod = None - ) - ) - ) && - assert(queryWithPoint)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryWithDistanceType)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = Some(Plane), - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryWithName)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = Some("name"), - validationMethod = None - ) - ) - ) && assert(queryWithValidationMethod)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = None, - queryName = None, - validationMethod = Some(IgnoreMalformed) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - GeoDistance[TestDocument]( - field = "geoPointField", - point = "20.0,21.1", - distance = Distance(200, Kilometers), - distanceType = Some(Plane), - queryName = 
Some("name"), - validationMethod = Some(IgnoreMalformed) - ) - ) - ) - }, - test("geoPolygon") { - val query = - geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) - val queryTs = - geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) - val queryWithName = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") - val queryWithValidationMethod = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) - .validationMethod(IgnoreMalformed) - .name("name") - - assert(query)( - equalTo( - GeoPolygon[Any]( - field = "testField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryTs)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("drm3btev3e86", "drm3btev3e87"), - queryName = None, - validationMethod = None - ) - ) - ) && assert(queryWithName)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = Some("name"), - validationMethod = None - ) - ) - ) && assert(queryWithValidationMethod)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = None, - validationMethod = Some(IgnoreMalformed) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - GeoPolygon[TestDocument]( - field = "stringField", - points = Chunk("40, -70", "30, -80", "20, -90"), - queryName = Some("name"), - validationMethod = Some(IgnoreMalformed) - ) - ) - ) - }, - test("hasChild") { - val query = hasChild("child", matchAll) - val queryWithIgnoreUnmapped = hasChild("child", matchAll).ignoreUnmappedTrue - val queryWithInnerHits = hasChild("child", matchAll).innerHits - val queryWithMaxChildren = hasChild("child", matchAll).maxChildren(5) - val queryWithMinChildren = hasChild("child", matchAll).minChildren(1) - val queryWithScoreMode = hasChild("child", matchAll).scoreMode(ScoreMode.Avg) - val queryWithAllParams = hasChild("child", matchAll) - .scoreMode(ScoreMode.Avg) - .ignoreUnmappedTrue - .innerHits - .maxChildren(5) - .minChildren(1) - - assert(query)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = None, - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithIgnoreUnmapped)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = Some(true), - innerHitsField = None, - maxChildren = None, - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithInnerHits)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = Some(InnerHits()), - maxChildren = None, - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithMaxChildren)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = Some(5), - minChildren = None, - scoreMode = None - ) - ) - ) && assert(queryWithMinChildren)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = None, - minChildren = Some(1), - scoreMode = None - ) - ) - ) && assert(queryWithScoreMode)( - equalTo( - HasChild[Any]( - childType = 
"child", - query = matchAll, - ignoreUnmapped = None, - innerHitsField = None, - maxChildren = None, - minChildren = None, - scoreMode = Some(ScoreMode.Avg) - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - HasChild[Any]( - childType = "child", - query = matchAll, - ignoreUnmapped = Some(true), - innerHitsField = Some(InnerHits()), - maxChildren = Some(5), - minChildren = Some(1), - scoreMode = Some(ScoreMode.Avg) - ) - ) - ) - }, - test("hasParent") { - val query = hasParent("parent", matchAll) - val queryWithBoost = hasParent("parent", matchAll).boost(3) - val queryWithScoreTrue = hasParent("parent", matchAll).withScoreTrue - val queryWithScoreFalse = hasParent("parent", matchAll).withScoreFalse - val queryWithIgnoreUnmappedTrue = hasParent("parent", matchAll).ignoreUnmappedTrue - val queryWithIgnoreUnmappedFalse = hasParent("parent", matchAll).ignoreUnmappedFalse - val queryWithAllParams = hasParent("parent", matchAll).boost(3).ignoreUnmappedFalse.withScoreTrue - - assert(query)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = None, - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithBoost)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = Some(3.0), - ignoreUnmapped = None, - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithScoreTrue)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = None, - innerHitsField = None, - score = Some(true) - ) - ) - ) && assert(queryWithScoreFalse)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = None, - innerHitsField = None, - score = Some(false) - ) - ) - ) && assert(queryWithIgnoreUnmappedTrue)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = Some(true), - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithIgnoreUnmappedFalse)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = None, - ignoreUnmapped = Some(false), - innerHitsField = None, - score = None - ) - ) - ) && assert(queryWithAllParams)( - equalTo( - HasParent[Any]( - parentType = "parent", - query = matchAll, - boost = Some(3.0), - ignoreUnmapped = Some(false), - innerHitsField = None, - score = Some(true) - ) - ) - ) - }, - test("ids") { - val idsQuery = ids("1", "2", "3") - - assert(idsQuery)( - equalTo( - Ids[Any]( - values = Chunk("1", "2", "3") - ) - ) - ) - }, - test("kNN") { - val queryString = kNN("stringField", 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryBool = kNN("boolField", 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryInt = kNN("intField", 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryStringTs = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryBoolTs = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryIntTs = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryWithSimilarity = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) - - assert(queryString)( - equalTo( - KNN[Any]( - field = "stringField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryBool)( - equalTo( - KNN[Any]( - field = "boolField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryInt)( - equalTo( - KNN[Any]( - field = "intField", - k = 5, - 
numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryStringTs)( - equalTo( - KNN[TestDocument]( - field = "stringField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryBoolTs)( - equalTo( - KNN[TestDocument]( - field = "booleanField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryIntTs)( - equalTo( - KNN[TestDocument]( - field = "intField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = None - ) - ) - ) && - assert(queryWithSimilarity)( - equalTo( - KNN[TestDocument]( - field = "stringField", - k = 5, - numCandidates = 10, - queryVector = Chunk(1.1, 2.2, 3.3), - similarity = Some(3.14) - ) - ) - ) - }, - test("matchAll") { - val query = matchAll - val queryWithBoost = matchAll.boost(3.14) - - assert(query)(equalTo(MatchAll(boost = None))) && assert(queryWithBoost)( - equalTo(MatchAll(boost = Some(3.14))) - ) - }, - test("matchBooleanPrefix") { - val queryString = matchBooleanPrefix("stringField", "test") - val queryBool = matchBooleanPrefix("booleanField", true) - val queryInt = matchBooleanPrefix("intField", 1) - val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") - val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) - val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) - val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") - val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) - - assert(queryString)( - equalTo(MatchBooleanPrefix[Any, String](field = "stringField", value = "test", minimumShouldMatch = None)) - ) && - assert(queryBool)( - equalTo(MatchBooleanPrefix[Any, Boolean](field = "booleanField", value = true, minimumShouldMatch = None)) - ) && - assert(queryInt)( - equalTo(MatchBooleanPrefix[Any, Int](field = "intField", value = 1, minimumShouldMatch = None)) - ) && - assert(queryStringTs)( - equalTo( - MatchBooleanPrefix[TestDocument, String](field = "stringField", value = "test", minimumShouldMatch = None) - ) - ) && - assert(queryBoolTs)( - equalTo( - MatchBooleanPrefix[TestDocument, Boolean](field = "booleanField", value = true, minimumShouldMatch = None) - ) - ) && - assert(queryIntTs)( - equalTo(MatchBooleanPrefix[TestDocument, Int](field = "intField", value = 1, minimumShouldMatch = None)) - ) && - assert(queryWithSuffix)( - equalTo( - MatchBooleanPrefix[TestDocument, String]( - field = "stringField.raw", - value = "test", - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithMinimumShouldMatch)( - equalTo( - MatchBooleanPrefix[TestDocument, String]( - field = "stringField", - value = "test", - minimumShouldMatch = Some(3) - ) - ) - ) - }, - test("matches") { - val queryString = matches("stringField", "test") - val queryBool = matches("booleanField", true) - val queryInt = matches("intField", 1) - val queryStringTs = matches(TestDocument.stringField, "test") - val queryBoolTs = matches(TestDocument.booleanField, true) - val queryIntTs = matches(TestDocument.intField, 1) - val queryWithSuffix = matches(TestDocument.stringField.raw, "test") - val queryWithBoost = matches(TestDocument.doubleField, 3.14) - - assert(queryString)(equalTo(Match[Any, String](field = "stringField", value = "test"))) && - assert(queryBool)(equalTo(Match[Any, Boolean](field = "booleanField", value = true))) && - 
assert(queryInt)(equalTo(Match[Any, Int](field = "intField", value = 1))) && - assert(queryStringTs)( - equalTo(Match[TestDocument, String](field = "stringField", value = "test")) - ) && - assert(queryBoolTs)( - equalTo(Match[TestDocument, Boolean](field = "booleanField", value = true)) - ) && - assert(queryIntTs)(equalTo(Match[TestDocument, Int](field = "intField", value = 1))) && - assert(queryWithSuffix)( - equalTo(Match[TestDocument, String](field = "stringField.raw", value = "test")) - ) && - assert(queryWithBoost)( - equalTo(Match[TestDocument, Double](field = "doubleField", value = 3.14)) - ) - }, - test("matchPhrase") { - val query = matchPhrase("stringField", "this is a test") - val queryTs = matchPhrase(TestDocument.stringField, "this is a test") - val queryWithSuffix = matchPhrase(TestDocument.stringField.raw, "this is a test") - val queryWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) - - assert(query)(equalTo(MatchPhrase[Any](field = "stringField", value = "this is a test", boost = None))) && - assert(queryTs)( - equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = None)) - ) && - assert(queryWithSuffix)( - equalTo(MatchPhrase[TestDocument](field = "stringField.raw", value = "this is a test", boost = None)) - ) && - assert(queryWithBoost)( - equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = Some(3))) - ) - }, - test("matchPhrasePrefix") { - val query = matchPhrasePrefix("stringField", "test") - val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") - - assert(query)(equalTo(MatchPhrasePrefix[Any](field = "stringField", value = "test"))) && - assert(queryTs)(equalTo(MatchPhrasePrefix[TestDocument](field = "stringField", value = "test"))) - }, - test("multiMatch") { - val query = multiMatch("this is a test") - val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") - val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) - val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) - val queryWithType = multiMatch("this is a test").matchingType(BestFields) - val queryWithBoost = multiMatch("this is a test").boost(2.2) - val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) - val queryWithAllParams = multiMatch("this is a test") - .fields(TestDocument.stringField) - .matchingType(BestFields) - .boost(2.2) - .minimumShouldMatch(2) - - assert(query)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithFields)( - equalTo( - MultiMatch[Any]( - fields = Chunk("stringField1", "stringField2"), - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithFieldsTs)( - equalTo( - MultiMatch[TestDocument]( - fields = Chunk("stringField"), - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithFieldsSuffix)( - equalTo( - MultiMatch[TestDocument]( - fields = Chunk("stringField.raw"), - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithType)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = Some(BestFields), - boost = None, - minimumShouldMatch = None - ) - ) - 
) && - assert(queryWithBoost)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = None, - boost = Some(2.2), - minimumShouldMatch = None - ) - ) - ) && - assert(queryWithMinimumShouldMatch)( - equalTo( - MultiMatch[Any]( - fields = Chunk.empty, - value = "this is a test", - matchingType = None, - boost = None, - minimumShouldMatch = Some(2) - ) - ) - ) && - assert(queryWithAllParams)( - equalTo( - MultiMatch[TestDocument]( - fields = Chunk("stringField"), - value = "this is a test", - matchingType = Some(BestFields), - boost = Some(2.2), - minimumShouldMatch = Some(2) - ) - ) - ) - }, - test("nested") { - val query = nested("testField", matchAll) - val queryTs = nested(TestDocument.subDocumentList, matchAll) - val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue - val queryWithInnerHits = - nested(TestDocument.subDocumentList, matchAll).innerHits(InnerHits().from(0).name("innerHitName").size(3)) - val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits - val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) - val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse - .innerHits(InnerHits().name("innerHitName")) - .scoreMode(ScoreMode.Max) - - assert(query)( - equalTo( - Nested[Any]( - path = "testField", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = None - ) - ) - ) && - assert(queryTs)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = None - ) - ) - ) && - assert(queryWithIgnoreUnmapped)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = Some(true), - innerHitsField = None - ) - ) - ) && - assert(queryWithInnerHits)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = Some( - InnerHits( - excluded = Chunk(), - included = Chunk(), - from = Some(0), - highlights = None, - name = Some("innerHitName"), - size = Some(3) - ) - ) - ) - ) - ) && - assert(queryWithInnerHitsEmpty)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = None, - ignoreUnmapped = None, - innerHitsField = Some( - InnerHits( - excluded = Chunk(), - included = Chunk(), - from = None, - highlights = None, - name = None, - size = None - ) - ) - ) - ) - ) && - assert(queryWithScoreMode)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = Some(ScoreMode.Avg), - ignoreUnmapped = None, - innerHitsField = None - ) - ) - ) && - assert(queryWithAllParams)( - equalTo( - Nested[TestDocument]( - path = "subDocumentList", - query = MatchAll(boost = None), - scoreMode = Some(ScoreMode.Max), - ignoreUnmapped = Some(false), - innerHitsField = Some( - InnerHits( - excluded = Chunk(), - included = Chunk(), - from = None, - highlights = None, - name = Some("innerHitName"), - size = None - ) - ) - ) - ) - ) - }, - test("prefix") { - val query = prefix("stringField", "test") - val queryTs = prefix(TestDocument.stringField, "test") - val queryWithSuffix = prefix(TestDocument.stringField.keyword, "test") - val queryWithCaseInsensitive = prefix(TestDocument.stringField, 
"test").caseInsensitiveTrue - - assert(query)( - equalTo(Prefix[Any](field = "stringField", value = "test", caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo(Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Prefix[TestDocument](field = "stringField.keyword", value = "test", caseInsensitive = None) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = Some(true)) - ) - ) - }, - test("range") { - val query = range("testField") - val queryString = range(TestDocument.stringField) - val queryInt = range(TestDocument.intField) - val queryWithSuffix = range(TestDocument.stringField.suffix("test")) - val queryLowerBound = range(TestDocument.doubleField).gt(3.14) - val queryUpperBound = range(TestDocument.doubleField).lt(10.21) - val queryInclusiveLowerBound = range(TestDocument.intField).gte(10) - val queryInclusiveUpperBound = range(TestDocument.intField).lte(21) - val queryMixedBounds = queryLowerBound.lte(21.0) - val queryWithBoostParam = queryMixedBounds.boost(2.8) - val queryWithFormatParam = range(TestDocument.dateField).gt(LocalDate.of(2023, 5, 11)).format("yyyy-MM-dd") - - assert(query)( - equalTo( - Range[Any, Any, Unbounded.type, Unbounded.type]( - field = "testField", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryString)( - equalTo( - Range[TestDocument, String, Unbounded.type, Unbounded.type]( - field = "stringField", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryInt)( - equalTo( - Range[TestDocument, Int, Unbounded.type, Unbounded.type]( - field = "intField", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Range[TestDocument, String, Unbounded.type, Unbounded.type]( - field = "stringField.test", - lower = Unbounded, - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryLowerBound)( - equalTo( - Range[TestDocument, Double, GreaterThan[Double], Unbounded.type]( - field = "doubleField", - lower = GreaterThan(3.14), - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryUpperBound)( - equalTo( - Range[TestDocument, Double, Unbounded.type, LessThan[Double]]( - field = "doubleField", - lower = Unbounded, - upper = LessThan(10.21), - boost = None, - format = None - ) - ) - ) && - assert(queryInclusiveLowerBound)( - equalTo( - Range[TestDocument, Int, GreaterThanOrEqualTo[Int], Unbounded.type]( - field = "intField", - lower = GreaterThanOrEqualTo(10), - upper = Unbounded, - boost = None, - format = None - ) - ) - ) && - assert(queryInclusiveUpperBound)( - equalTo( - Range[TestDocument, Int, Unbounded.type, LessThanOrEqualTo[Int]]( - field = "intField", - lower = Unbounded, - upper = LessThanOrEqualTo(21), - boost = None, - format = None - ) - ) - ) && - assert(queryMixedBounds)( - equalTo( - Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]]( - field = "doubleField", - lower = GreaterThan(3.14), - upper = LessThanOrEqualTo(21.0), - boost = None, - format = None - ) - ) - ) && - assert(queryWithBoostParam)( - equalTo( - Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]]( - field = "doubleField", - lower = GreaterThan(3.14), - upper = LessThanOrEqualTo(21), - boost = Some(2.8), - format = None - ) - ) - ) && - 
assert(queryWithFormatParam)( - equalTo( - Range[TestDocument, LocalDate, GreaterThan[LocalDate], Unbounded.type]( - field = "dateField", - lower = GreaterThan(LocalDate.of(2023, 5, 11)), - upper = Unbounded, - boost = None, - format = Some("yyyy-MM-dd") - ) - ) - ) - }, - test("regexp") { - val query = regexp("stringField", "t.*st") - val queryTs = regexp(TestDocument.stringField, "t.*st") - val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue - val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") - - assert(query)(equalTo(Regexp[Any](field = "stringField", value = "t.*st", caseInsensitive = None))) && - assert(queryTs)( - equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = None)) - ) && - assert(queryWithCaseInsensitive)( - equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = Some(true))) - ) && - assert(queryWithSuffix)( - equalTo(Regexp[TestDocument](field = "stringField.raw", value = "t.*st", caseInsensitive = None)) - ) - }, - test("script") { - val query = - ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) - val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) - - assert(query)( - equalTo( - zio.elasticsearch.query.Script( - script = Script( - source = "doc['day_of_week'].value > params['day']", - params = Map("day" -> 2), - lang = Some(Painless) - ), - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - zio.elasticsearch.query.Script( - script = Script( - source = "doc['day_of_week'].value > 2", - params = Map.empty, - lang = None - ), - boost = Some(2.0) - ) - ) - ) - }, - test("startsWith") { - val query = startsWith("testField", "test") - val queryTs = startsWith(TestDocument.stringField, "test") - val queryWithSuffix = startsWith(TestDocument.stringField.raw, "test") - val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = startsWith(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(query)( - equalTo(Wildcard[Any](field = "testField", value = "test*", boost = None, caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithSuffix)( - equalTo( - Wildcard[TestDocument](field = "stringField.raw", value = "test*", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "test*", - boost = Some(10.21), - caseInsensitive = None - ) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = Some(true)) - ) - ) && - assert(queryAllParams)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "test*", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - }, - test("term") { - val queryString = term("stringField", "test") - val queryBool = term("booleanField", true) - val queryInt = term("intField", 1) - val queryStringTs = term(TestDocument.stringField, "test") - val queryBoolTs = term(TestDocument.booleanField, true) - val queryIntTs = term(TestDocument.intField, 1) - val queryWithSuffix = term(TestDocument.stringField.keyword, "test") - 
val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(queryString)( - equalTo(Term[Any, String](field = "stringField", value = "test", boost = None, caseInsensitive = None)) - ) && - assert(queryBool)( - equalTo(Term[Any, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None)) - ) && - assert(queryInt)( - equalTo(Term[Any, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) - ) && - assert(queryStringTs)( - equalTo( - Term[TestDocument, String](field = "stringField", value = "test", boost = None, caseInsensitive = None) - ) - ) && - assert(queryBoolTs)( - equalTo( - Term[TestDocument, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None) - ) - ) && - assert(queryIntTs)( - equalTo(Term[TestDocument, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Term[TestDocument, String]( - field = "stringField.keyword", - value = "test", - boost = None, - caseInsensitive = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - Term[TestDocument, String]( - field = "stringField", - value = "test", - boost = Some(10.21), - caseInsensitive = None - ) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Term[TestDocument, String]( - field = "stringField", - value = "test", - boost = None, - caseInsensitive = Some(true) - ) - ) - ) && - assert(queryAllParams)( - equalTo( - Term[TestDocument, String]( - field = "stringField", - value = "test", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - }, - test("terms") { - val queryString = terms("stringField", "a", "b", "c") - val queryBool = terms("booleanField", true, false) - val queryInt = terms("intField", 1, 2, 3) - val queryStringTs = terms(TestDocument.stringField, "a", "b", "c") - val queryBoolTs = terms(TestDocument.booleanField, true, false) - val queryIntTs = terms(TestDocument.intField, 1, 2, 3) - val queryWithSuffix = terms(TestDocument.stringField.keyword, "a", "b", "c") - val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) - - assert(queryString)( - equalTo(Terms[Any, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) - ) && - assert(queryBool)( - equalTo(Terms[Any, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) - ) && - assert(queryInt)( - equalTo(Terms[Any, Int](field = "intField", values = Chunk(1, 2, 3), boost = None)) - ) && - assert(queryStringTs)( - equalTo(Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) - ) && - assert(queryBoolTs)( - equalTo(Terms[TestDocument, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) - ) && - assert(queryIntTs)( - equalTo(Terms[TestDocument, Int](field = "intField", values = Chunk(1, 2, 3), boost = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Terms[TestDocument, String](field = "stringField.keyword", values = Chunk("a", "b", "c"), boost = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = Some(10.21)) - ) - ) - }, - test("termsSet") { - val queryString = - termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") - 
val queryBool = - termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) - val queryInt = termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3) - val queryStringTs = termsSet( - field = TestDocument.stringField, - minimumShouldMatchField = TestDocument.stringField, - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSet( - field = TestDocument.booleanField, - minimumShouldMatchField = TestDocument.booleanField, - terms = true, - false - ) - val queryIntTs = - termsSet(field = TestDocument.intField, minimumShouldMatchField = TestDocument.intField, terms = 1, 2, 3) - val queryWithSuffix = - termsSet( - field = TestDocument.stringField.keyword, - minimumShouldMatchField = TestDocument.stringField, - terms = "a", - "b", - "c" - ) - val queryWithBoost = - termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) - - assert(queryString)( - equalTo( - TermsSet[Any, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryBool)( - equalTo( - TermsSet[Any, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryInt)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryStringTs)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = Some("stringField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryBoolTs)( - equalTo( - TermsSet[TestDocument, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = Some("booleanField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryIntTs)( - equalTo( - TermsSet[TestDocument, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = Some("intField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField.keyword", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = Some("stringField"), - minimumShouldMatchScript = None, - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = Some("required_matches"), - minimumShouldMatchScript = None, - boost = Some(10.0) - ) - ) - ) - }, - test("termsSetScript") { - val queryString = termsSetScript( - field = "stringField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBool = termsSetScript( - field = "booleanField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryInt = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3 - ) - val queryStringTs = termsSetScript( - field = TestDocument.stringField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSetScript( - field = TestDocument.booleanField, - 
minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryIntTs = termsSetScript( - field = TestDocument.intField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3 - ) - val queryWithSuffix = - termsSetScript( - field = TestDocument.stringField.keyword, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryWithBoost = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3 - ).boost(10.0) - - assert(queryString)( - equalTo( - TermsSet[Any, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryBool)( - equalTo( - TermsSet[Any, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryInt)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryStringTs)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryBoolTs)( - equalTo( - TermsSet[TestDocument, Boolean]( - field = "booleanField", - terms = Chunk(true, false), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryIntTs)( - equalTo( - TermsSet[TestDocument, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryWithSuffix)( - equalTo( - TermsSet[TestDocument, String]( - field = "stringField.keyword", - terms = Chunk("a", "b", "c"), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = None - ) - ) - ) && - assert(queryWithBoost)( - equalTo( - TermsSet[Any, Int]( - field = "intField", - terms = Chunk(1, 2, 3), - minimumShouldMatchField = None, - minimumShouldMatchScript = Some(Script("doc['intField'].value")), - boost = Some(10.0) - ) - ) - ) - }, - test("wildcard") { - val query = wildcard("testField", "test") - val queryTs = wildcard(TestDocument.stringField, "test") - val queryWithSuffix = wildcard(TestDocument.stringField.raw, "test") - val queryWithBoost = wildcard(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "test").caseInsensitiveTrue - val queryAllParams = wildcard(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - assert(query)( - equalTo(Wildcard[Any](field = "testField", value = "test", boost = None, caseInsensitive = None)) - ) && - assert(queryTs)( - equalTo(Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = None)) - ) && - assert(queryWithSuffix)( - equalTo( - Wildcard[TestDocument](field = "stringField.raw", value = "test", boost = None, caseInsensitive = None) - ) - ) && - assert(queryWithBoost)( - equalTo( - Wildcard[TestDocument](field = 
"stringField", value = "test", boost = Some(10.21), caseInsensitive = None) - ) - ) && - assert(queryWithCaseInsensitive)( - equalTo( - Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = Some(true)) - ) - ) && - assert(queryAllParams)( - equalTo( - Wildcard[TestDocument]( - field = "stringField", - value = "test", - boost = Some(3.14), - caseInsensitive = Some(false) - ) - ) - ) - } - ), - suite("encoding as JSON")( - suite("bool")( - test("filter") { - val query = filter(matches(TestDocument.doubleField, 39.2)) - val queryWithBoost = filter(matches(TestDocument.booleanField, true)).boost(3.14) - - val expected = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match": { - | "doubleField": 39.2 - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match": { - | "booleanField": true - | } - | } - | ], - | "boost": 3.14 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("must") { - val query = must(matchPhrase(TestDocument.stringField, "test")) - val queryWithBoost = must(terms(TestDocument.stringField, "a", "b", "c")).boost(3.14) - - val expected = - """ - |{ - | "bool": { - | "must": [ - | { - | "match_phrase": { - | "stringField": "test" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "boost": 3.14 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("mustNot") { - val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) - .boost(10.21) - - val expected = - """ - |{ - | "bool": { - | "must_not": [ - | { - | "match": { - | "stringField": "test" - | } - | }, - | { - | "match": { - | "testField": "test field" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "must_not": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | } - | ], - | "boost": 10.21 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("should") { - val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) - val queryWithBoost = - should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) - val queryWithMinimumShouldMatch = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).minimumShouldMatch(2) - val queryWithAllParams = should( - matches(TestDocument.stringField.keyword, "test"), - matches(TestDocument.intField, 22), - exists(TestDocument.booleanField) - ).boost(3.14).minimumShouldMatch(2) - - val expected = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField": "test" - | } - | }, - | { - | "match": { - | "testField": "test field" - | } - | } - | ] - | } 
- |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | } - | ], - | "boost": 10.21 - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | }, - | { - | "exists": { - | "field": "booleanField" - | } - | } - | ], - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "bool": { - | "should": [ - | { - | "match": { - | "stringField.keyword": "test" - | } - | }, - | { - | "match": { - | "intField": 22 - | } - | }, - | { - | "exists": { - | "field": "booleanField" - | } - | } - | ], - | "boost": 3.14, - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("filter + must + mustNot + should") { - val query1 = - filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) - val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) - .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) - val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) - .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) - .mustNot(matches(TestDocument.intField, 50)) - val queryWithBoost = query1.boost(3.14) - val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) - val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) - - val expected1 = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match_phrase": { - | "stringField": "test" - | } - | } - | ], - | "must": [ - | { - | "match": { - | "booleanField": true - | } - | } - | ] - | } - |} - |""".stripMargin - - val expected2 = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "doubleField": 3.14 - | } - | }, - | { - | "match": { - | "testField": true - | } - | }, - | { - | "exists": { - | "field": "anotherTestField" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expected3 = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "intField": 50 - | } - | } - | ], - | "should": [ - | { - | "range": { - | "intField": { - | "gt": 1, - | "lte": 100 - | } - | } - | }, - | { - | "match": { - | "stringField": "test" - | } - | } - | ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "bool": { - | "filter": [ - | { - | "match_phrase": { - | "stringField": "test" - | } - | } - | ], - | "must": [ - | { - | "match": { - | "booleanField": true - | } - | } - | ], - | "boost": 3.14 - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "doubleField": 3.14 - 
| } - | }, - | { - | "match": { - | "testField": true - | } - | }, - | { - | "exists": { - | "field": "anotherTestField" - | } - | } - | ], - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "bool": { - | "must": [ - | { - | "terms": { - | "stringField": ["a", "b", "c"] - | } - | } - | ], - | "must_not": [ - | { - | "match": { - | "intField": 50 - | } - | } - | ], - | "should": [ - | { - | "range": { - | "intField": { - | "gt": 1, - | "lte": 100 - | } - | } - | }, - | { - | "match": { - | "stringField": "test" - | } - | } - | ], - | "boost": 3.14, - | "minimum_should_match": 3 - | } - |} - |""".stripMargin - - assert(query1.toJson(fieldPath = None))(equalTo(expected1.toJson)) && - assert(query2.toJson(fieldPath = None))(equalTo(expected2.toJson)) && - assert(query3.toJson(fieldPath = None))(equalTo(expected3.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - } - ), - test("boosting") { - val query = boosting(0.5f, exists("stringField"), terms("booleanField", true, false)) - val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) - - val expected = - """ - |{ - | "boosting": { - | "positive": { - | "terms": { - | "booleanField": [ true, false ] - | } - | }, - | "negative": { - | "exists": { - | "field": "stringField" - | } - | }, - | "negative_boost": 0.5 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("constantScore") { - val query = constantScore(matchPhrase("stringField", "test")) - val queryTs = constantScore(matchPhrase(TestDocument.stringField, "test")) - val queryWithBoost = constantScore(matchPhrase(TestDocument.stringField, "test")).boost(1.5) - - val expected = - """ - |{ - | "constant_score": { - | "filter": { - | "match_phrase": { - | "stringField": "test" - | } - | } - | } - |} - |""".stripMargin - val expectedWithBoost = - """ - |{ - | "constant_score": { - | "filter": { - | "match_phrase": { - | "stringField": "test" - | } - | }, - | "boost": 1.5 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("contains") { - val query = contains(TestDocument.stringField, "test") - val queryWithBoost = contains(TestDocument.stringField, "test").boost(3.14) - val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue - val queryWithAllParams = contains(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse - - val expected = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "*test*" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "*test*", - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "*test*", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "wildcard": { - | 
"stringField": { - | "value": "*test*", - | "boost": 39.2, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("disjunctionMax") { - val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) - val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) - val queryWithTieBreaker = - disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) - - val expected = - """ - |{ - | "dis_max": { - | "queries": [ - | { "exists": { "field": "existsField" } }, - | { "ids": { "values": ["1", "2", "3"] } } - | ] - | } - |} - |""".stripMargin - - val expectedTs = - """ - |{ - | "dis_max": { - | "queries": [ - | { "exists": { "field": "stringField" } }, - | { "ids": { "values": ["1", "2", "3"] } } - | ] - | } - |} - |""".stripMargin - - val expectedWithTieBreaker = - """ - |{ - | "dis_max": { - | "queries": [ - | { "exists": { "field": "existsField" } }, - | { "ids": { "values": ["1", "2", "3"] } } - | ], - | "tie_breaker": 0.5 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && - assert(queryWithTieBreaker.toJson(fieldPath = None))(equalTo(expectedWithTieBreaker.toJson)) - }, - test("exists") { - val query = exists("testField") - val queryTs = exists(TestDocument.dateField) - val queryTsWithBoost = exists(TestDocument.dateField).boost(3) - - val expected = - """ - |{ - | "exists": { - | "field": "testField" - | } - |} - |""".stripMargin - - val expectedTs = - """ - |{ - | "exists": { - | "field": "dateField" - | } - |} - |""".stripMargin - - val expectedTsWithBoost = - """ - |{ - | "exists": { - | "field": "dateField", - | "boost": 3.0 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && - assert(queryTsWithBoost.toJson(fieldPath = None))(equalTo(expectedTsWithBoost.toJson)) - }, - test("functionScore") { - val query = functionScore( - scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")), - randomScoreFunction().weight(2.0), - expDecayFunction("field", origin = "2013-09-17", scale = "10d") - .offset("5d") - .multiValueMode(Max) - .weight(10.0) - ) - .boost(2.0) - .boostMode(FunctionScoreBoostMode.Avg) - .maxBoost(42) - .minScore(32) - .query(matches("stringField", "string")) - .scoreMode(FunctionScoreScoreMode.Min) - - val expected = - """ - |{ - | "function_score": { - | "query" : { "match": { "stringField" : "string" } }, - | "score_mode": "min", - | "boost": 2.0, - | "boost_mode": "avg", - | "max_boost": 42.0, - | "min_score": 32.0, - | "functions": [ - | { - | "script_score": { - | "script": { - | "source": "params.agg1 + params.agg2 > 10" - | } - | } - | }, - | { - | "random_score": {}, - | "weight": 2.0 - | }, - | { - | "exp": { - | "field": { - | "origin": "2013-09-17", - | "scale": "10d", - | "offset": "5d" - | }, - | "multi_value_mode": "max" - | }, - | "weight": 10.0 - | } - | ] - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("fuzzy") { - val query = 
fuzzy("stringField", "test") - val queryTs = fuzzy(TestDocument.stringField, "test") - val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") - val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) - val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) - val queryWithAllParameters = - fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) - val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") - - val expected = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - val expectedWithFuzzinessAuto = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "fuzziness": "AUTO" - | } - | } - |} - |""".stripMargin - - val expectedWithMaxExpansions = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "max_expansions": 50 - | } - | } - |} - |""".stripMargin - - val expectedWithPrefixLength = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "prefix_length": 3 - | } - | } - |} - |""".stripMargin - - val expectedWithAllParameters = - """ - |{ - | "fuzzy": { - | "stringField": { - | "value": "test", - | "fuzziness": "AUTO", - | "max_expansions": 50, - | "prefix_length": 3 - | } - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "fuzzy": { - | "stringField.raw": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithFuzzinessAuto.toJson(fieldPath = None))(equalTo(expectedWithFuzzinessAuto.toJson)) && - assert(queryWithMaxExpansions.toJson(fieldPath = None))(equalTo(expectedWithMaxExpansions.toJson)) && - assert(queryWithPrefixLength.toJson(fieldPath = None))(equalTo(expectedWithPrefixLength.toJson)) && - assert(queryWithAllParameters.toJson(fieldPath = None))(equalTo(expectedWithAllParameters.toJson)) && - assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) - }, - test("geoDistance") { - val queryWithHash = - geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) - val queryWithPoint = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - val queryWithDistanceType = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) - val queryWithName = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") - val queryWithValidationMethod = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = - geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) - .validationMethod(IgnoreMalformed) - .distanceType(Plane) - .name("name") - - val expectedWithHash = - """ - |{ - | "geo_distance": { - | "geoPointField": "drm3btev3e86", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val expectedWithDistance = - """ - |{ - | "geo_distance": { - | "distance": "200.0km", - | "geoPointField": "20.0,21.1" - | } - |} - |""".stripMargin - - val expectedWithDistanceType = - """ - |{ - | "geo_distance": { - | "distance_type" : "plane", - | "geoPointField": "20.0,21.1", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val 
expectedWithName = - """ - |{ - | "geo_distance": { - | "_name": "name", - | "geoPointField": "20.0,21.1", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val expectedWithValidationMethod = - """ - |{ - | "geo_distance": { - | "validation_method": "IGNORE_MALFORMED", - | "geoPointField": "20.0,21.1", - | "distance": "200.0km" - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "geo_distance": { - | "validation_method": "IGNORE_MALFORMED", - | "distance_type" : "plane", - | "_name": "name", - | "distance": "200.0km", - | "geoPointField": "20.0,21.1" - | } - |} - |""".stripMargin - - assert(queryWithHash.toJson(fieldPath = None))(equalTo(expectedWithHash.toJson)) && - assert(queryWithPoint.toJson(fieldPath = None))(equalTo(expectedWithDistance.toJson)) && - assert(queryWithDistanceType.toJson(fieldPath = None))(equalTo(expectedWithDistanceType.toJson)) && - assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && - assert(queryWithValidationMethod.toJson(fieldPath = None))(equalTo(expectedWithValidationMethod.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("geoPolygon") { - val query = - geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) - val queryTs = - geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) - val queryWithName = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") - val queryWithValidationMethod = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( - IgnoreMalformed - ) - val queryWithAllParams = - geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) - .validationMethod(IgnoreMalformed) - .name("name") - - val expected = - """ - |{ - | "geo_polygon": { - | "testField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - val expectedTs = - """ - |{ - | "geo_polygon": { - | "stringField": { - | "points": ["drm3btev3e86", "drm3btev3e87"] - | } - | } - |} - |""".stripMargin - - val expectedWithName = - """ - |{ - | "geo_polygon": { - | "_name": "name", - | "stringField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - val expectedWithValidationMethod = - """ - |{ - | "geo_polygon": { - | "validation_method": "IGNORE_MALFORMED", - | "stringField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "geo_polygon": { - | "validation_method": "IGNORE_MALFORMED", - | "_name": "name", - | "stringField": { - | "points": ["40, -70", "30, -80", "20, -90"] - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && - assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && - assert(queryWithValidationMethod.toJson(fieldPath = None))(equalTo(expectedWithValidationMethod.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("hasChild") { - val query = hasChild("child", matches(TestDocument.stringField, "test")) - val queryWithIgnoreUnmapped = hasChild("child", matches("field", "value")).ignoreUnmappedTrue - val queryWithInnerHits = hasChild("child", matches("field", "value")).innerHits - val queryWithMaxChildren = hasChild("child", matches("field", 
"value")).maxChildren(5) - val queryWithMinChildren = hasChild("child", matches("field", "value")).minChildren(1) - val queryWithScoreMode = hasChild("child", matches("field", "value")).scoreMode(ScoreMode.Avg) - val queryWithAllParams = hasChild("child", matches("field", "value")) - .scoreMode(ScoreMode.Avg) - .ignoreUnmappedTrue - .innerHits - .maxChildren(5) - .minChildren(1) - - val expected = - """ - |{ - | "has_child": { - | "type": "child", - | "query": { - | "match": { - | "stringField" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithIgnoreUnmapped = - """ - |{ - | "has_child": { - | "type": "child", - | "ignore_unmapped": true, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithInnerHits = - """ - |{ - | "has_child": { - | "type": "child", - | "inner_hits": {}, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithMaxChildren = - """ - |{ - | "has_child": { - | "type": "child", - | "max_children": 5, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithMinChildren = - """ - |{ - | "has_child": { - | "type": "child", - | "min_children": 1, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithScoreMode = - """ - |{ - | "has_child": { - | "type": "child", - | "score_mode": "avg", - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "has_child": { - | "type": "child", - | "score_mode": "avg", - | "ignore_unmapped": true, - | "inner_hits": {}, - | "max_children": 5, - | "min_children": 1, - | "query": { - | "match": { - | "field" : "value" - | } - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && - assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && - assert(queryWithMaxChildren.toJson(fieldPath = None))(equalTo(expectedWithMaxChildren.toJson)) && - assert(queryWithMinChildren.toJson(fieldPath = None))(equalTo(expectedWithMinChildren.toJson)) && - assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("hasParent") { - val query = hasParent("parent", matches(TestDocument.stringField, "test")) - val queryWithBoost = hasParent("parent", matches(TestDocument.stringField, "test")).boost(3) - val queryWithScore = hasParent("parent", matches("field", "test")).withScoreFalse - val queryWithIgnoreUnmapped = hasParent("parent", matches("field", "test")).ignoreUnmappedFalse - val queryWithScoreAndIgnoreUnmapped = - hasParent("parent", matches("field", "test")).withScoreTrue.ignoreUnmappedTrue - val queryWithInnerHits = hasParent("parent", matches("field", "test")).innerHits - val queryWithAllParams = hasParent("parent", matches(TestDocument.stringField, "test")) - .boost(3) - .withScoreFalse - .ignoreUnmappedFalse - .innerHits - val expected = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | "match": { - | "stringField" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | 
"match": { - | "stringField" : "test" - | } - | }, - | "boost": 3.0 - | } - |} - |""".stripMargin - - val expectedWithScore = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "score": false, - | "query": { - | "match": { - | "field" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithIgnoreUnmapped = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "ignore_unmapped": false, - | "query": { - | "match": { - | "field" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithScoreAndIgnoreUnmapped = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "score": true, - | "ignore_unmapped": true, - | "query": { - | "match": { - | "field" : "test" - | } - | } - | } - |} - |""".stripMargin - - val expectedWithInnerHits = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | "match": { - | "field" : "test" - | } - | }, - | "inner_hits": {} - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "has_parent": { - | "parent_type": "parent", - | "query": { - | "match": { - | "stringField" : "test" - | } - | }, - | "boost": 3.0, - | "ignore_unmapped": false, - | "score": false, - | "inner_hits": {} - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithScore.toJson(fieldPath = None))(equalTo(expectedWithScore.toJson)) && - assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && - assert(queryWithScoreAndIgnoreUnmapped.toJson(fieldPath = None))( - equalTo(expectedWithScoreAndIgnoreUnmapped.toJson) - ) && - assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("ids") { - val query = ids("1", "2", "3") - - val expected = - """ - |{ - | "ids": { - | "values": ["1", "2", "3"] - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("kNN") { - val queryString = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryBool = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryInt = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) - val queryWithSimilarity = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) - - val expectedString = - """ - |{ - | "field": "stringField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10 - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "field": "booleanField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10 - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "field": "intField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10 - |} - |""".stripMargin - - val expectedWithSimilarity = - """ - |{ - | "field": "stringField", - | "query_vector": [1.1, 2.2, 3.3], - | "k": 5, - | "num_candidates": 10, - | "similarity": 3.14 - |} - |""".stripMargin - - assert(queryString.toJson)(equalTo(expectedString.toJson)) && - assert(queryBool.toJson)(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson)(equalTo(expectedInt.toJson)) && - assert(queryWithSimilarity.toJson)(equalTo(expectedWithSimilarity.toJson)) - }, - test("matchAll") { - val query = matchAll - val queryWithBoost = matchAll.boost(3.14) - - val expected = - 
""" - |{ - | "match_all": {} - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "match_all": { - | "boost": 3.14 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("matchBooleanPrefix") { - val queryString = matchBooleanPrefix("stringField", "test") - val queryBool = matchBooleanPrefix("booleanField", true) - val queryInt = matchBooleanPrefix("intField", 1) - val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") - val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) - val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) - val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") - val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) - - val expectedString = - """ - |{ - | "match_bool_prefix": { - | "stringField": "test" - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "match_bool_prefix": { - | "booleanField": true - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "match_bool_prefix": { - | "intField": 1 - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "match_bool_prefix": { - | "stringField.raw": "test" - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "match_bool_prefix": { - | "stringField": { - | "query": "test", - | "minimum_should_match": 3 - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))(equalTo(expectedWithMinimumShouldMatch.toJson)) - }, - test("matches") { - val query = matches("testField", true) - val queryTsInt = matches(TestDocument.intField, 39) - val queryTsString = matches(TestDocument.stringField, "test") - - val expected = - """ - |{ - | "match": { - | "testField": true - | } - |} - |""".stripMargin - - val expectedTsInt = - """ - |{ - | "match": { - | "intField": 39 - | } - |} - |""".stripMargin - - val expectedTsString = - """ - |{ - | "match": { - | "stringField": "test" - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTsInt.toJson(fieldPath = None))(equalTo(expectedTsInt.toJson)) && - assert(queryTsString.toJson(fieldPath = None))(equalTo(expectedTsString.toJson)) - }, - test("matchPhrase") { - val querySimple = matchPhrase("stringField", "this is a test") - val queryRaw = matchPhrase("stringField.raw", "this is a test") - val querySimpleTs = matchPhrase(TestDocument.stringField, "this is a test") - val queryRawTs = matchPhrase(TestDocument.stringField.raw, "this is a test") - val querySimpleTsWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) - - val expectedSimple = - """ - 
|{ - | "match_phrase": { - | "stringField": "this is a test" - | } - |} - |""".stripMargin - - val expectedRaw = - """ - |{ - | "match_phrase": { - | "stringField.raw": "this is a test" - | } - |} - |""".stripMargin - - val expectedSimpleTsWithBoost = - """ - |{ - | "match_phrase": { - | "stringField": { - | "query": "this is a test", - | "boost": 3.0 - | } - | } - |} - |""".stripMargin - - assert(querySimple.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && - assert(querySimpleTs.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && - assert(queryRaw.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && - assert(queryRawTs.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && - assert(querySimpleTsWithBoost.toJson(fieldPath = None))(equalTo(expectedSimpleTsWithBoost.toJson)) - }, - test("matchPhrasePrefix") { - val query = matchPhrasePrefix("stringField", "test") - val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") - - val expected = - """ - |{ - | "match_phrase_prefix": { - | "stringField": { - | "query" : "test" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) - }, - test("multiMatch") { - val query = multiMatch("this is a test") - val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") - val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) - val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) - val queryWithType = multiMatch("this is a test").matchingType(BestFields) - val queryWithBoost = multiMatch("this is a test").boost(2.2) - val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) - val queryWithAllParams = multiMatch("this is a test") - .fields(TestDocument.stringField) - .matchingType(BestFields) - .boost(2.2) - .minimumShouldMatch(2) - - val expected = - """ - |{ - | "multi_match": { - | "query": "this is a test" - | } - |} - |""".stripMargin - - val expectedWithFields = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "fields": [ "stringField1", "stringField2" ] - | } - |} - |""".stripMargin - - val expectedWithFieldsTs = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "fields": [ "stringField" ] - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "fields": [ "stringField.raw" ] - | } - |} - |""".stripMargin - - val expectedWithType = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "type": "best_fields" - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "boost": 2.2 - | } - |} - |""".stripMargin - - val expectedWithMinimumShouldMatch = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "multi_match": { - | "query": "this is a test", - | "type": "best_fields", - | "fields": [ "stringField" ], - | "boost": 2.2, - | "minimum_should_match": 2 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithFields.toJson(fieldPath = None))(equalTo(expectedWithFields.toJson)) && - assert(queryWithFieldsTs.toJson(fieldPath = None))(equalTo(expectedWithFieldsTs.toJson)) && - 
assert(queryWithFieldsSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && - assert(queryWithType.toJson(fieldPath = None))(equalTo(expectedWithType.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( - equalTo(expectedWithMinimumShouldMatch.toJson) - ) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("nested") { - val query = nested(TestDocument.subDocumentList, matchAll) - val queryWithNested = nested(TestDocument.subDocumentList, nested("items", term("testField", "test"))) - val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue - val queryWithInnerHits = - nested(TestDocument.subDocumentList, matchAll).innerHits( - InnerHits() - .from(0) - .size(3) - .name("innerHitName") - .highlights(highlight("stringField")) - .excludes("longField") - .includes("intField") - ) - val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits - val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) - val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse - .innerHits(InnerHits().from(10).size(20).name("innerHitName")) - .scoreMode(ScoreMode.Min) - - val expected = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | } - | } - |} - |""".stripMargin - - val expectedWithNested = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "nested": { - | "path": "subDocumentList.items", - | "query": { - | "term": { - | "subDocumentList.items.testField": { - | "value": "test" - | } - | } - | } - | } - | } - | } - |} - |""".stripMargin - - val expectedWithIgnoreUnmapped = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "ignore_unmapped": true - | } - |} - |""".stripMargin - - val expectedWithInnerHits = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "inner_hits": { - | "from": 0, - | "size": 3, - | "name": "innerHitName", - | "highlight" : { - | "fields" : { - | "subDocumentList.stringField" : {} - | } - | }, - | "_source" : { - | "includes" : [ - | "intField" - | ], - | "excludes" : [ - | "longField" - | ] - | } - | } - | } - |} - |""".stripMargin - - val expectedWithInnerHitsEmpty = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "inner_hits": {} - | } - |} - |""".stripMargin - - val expectedWithScoreMode = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "score_mode": "avg" - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "nested": { - | "path": "subDocumentList", - | "query": { - | "match_all": {} - | }, - | "ignore_unmapped": false, - | "score_mode": "min", - | "inner_hits": { - | "from": 10, - | "size": 20, - | "name": "innerHitName" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithNested.toJson(fieldPath = None))(equalTo(expectedWithNested.toJson)) && - assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && - assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && - 
assert(queryWithInnerHitsEmpty.toJson(fieldPath = None))(equalTo(expectedWithInnerHitsEmpty.toJson)) && - assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("prefix") { - val query = prefix(TestDocument.stringField, "test") - val queryWithCaseInsensitive = prefix(TestDocument.stringField, "test").caseInsensitiveTrue - - val expected = - """ - |{ - | "prefix": { - | "stringField": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "prefix": { - | "stringField": { - | "value": "test", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) - }, - test("range") { - val queryEmpty = range(TestDocument.intField) - val queryEmptyWithBoost = range(TestDocument.intField).boost(3.14) - val queryLowerBound = range(TestDocument.intField).gt(23) - val queryUpperBound = range(TestDocument.intField).lt(45) - val queryInclusiveLowerBound = range(TestDocument.intField).gte(23) - val queryInclusiveUpperBound = range(TestDocument.intField).lte(45) - val queryMixedBounds = range(TestDocument.intField).gt(10).lte(99) - val queryMixedBoundsWithBoost = range(TestDocument.intField).gt(10).lte(99).boost(3.14) - val queryWithFormat = range(TestDocument.dateField).gt(LocalDate.of(2020, 1, 10)).format("yyyy-MM-dd") - - val expectedEmpty = - """ - |{ - | "range": { - | "intField": { - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "range": { - | "intField": { - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedLowerBound = - """ - |{ - | "range": { - | "intField": { - | "gt": 23 - | } - | } - |} - |""".stripMargin - - val expectedUpperBound = - """ - |{ - | "range": { - | "intField": { - | "lt": 45 - | } - | } - |} - |""".stripMargin - - val expectedInclusiveLowerBound = - """ - |{ - | "range": { - | "intField": { - | "gte": 23 - | } - | } - |} - |""".stripMargin - - val expectedInclusiveUpperBound = - """ - |{ - | "range": { - | "intField": { - | "lte": 45 - | } - | } - |} - |""".stripMargin - - val expectedMixedBounds = - """ - |{ - | "range": { - | "intField": { - | "gt": 10, - | "lte": 99 - | } - | } - |} - |""".stripMargin - - val expectedMixedBoundsWithBoost = - """ - |{ - | "range": { - | "intField": { - | "gt": 10, - | "lte": 99, - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithFormat = - """ - |{ - | "range": { - | "dateField": { - | "gt": "2020-01-10", - | "format": "yyyy-MM-dd" - | } - | } - |} - |""".stripMargin - - assert(queryEmpty.toJson(fieldPath = None))(equalTo(expectedEmpty.toJson)) && - assert(queryEmptyWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryLowerBound.toJson(fieldPath = None))(equalTo(expectedLowerBound.toJson)) && - assert(queryUpperBound.toJson(fieldPath = None))(equalTo(expectedUpperBound.toJson)) && - assert(queryInclusiveLowerBound.toJson(fieldPath = None))(equalTo(expectedInclusiveLowerBound.toJson)) && - assert(queryInclusiveUpperBound.toJson(fieldPath = None))(equalTo(expectedInclusiveUpperBound.toJson)) && - assert(queryMixedBounds.toJson(fieldPath = None))(equalTo(expectedMixedBounds.toJson)) && - assert(queryMixedBoundsWithBoost.toJson(fieldPath = 
None))(equalTo(expectedMixedBoundsWithBoost.toJson)) && - assert(queryWithFormat.toJson(fieldPath = None))(equalTo(expectedWithFormat.toJson)) - }, - test("regexp") { - val query = regexp("stringField", "t.*st") - val queryTs = regexp(TestDocument.stringField, "t.*st") - val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue - val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") - - val expected = - """ - |{ - | "regexp": { - | "stringField": { - | "value": "t.*st" - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "regexp": { - | "stringField": { - | "value": "t.*st", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithSuffix = - """ - |{ - | "regexp": { - | "stringField.raw": { - | "value": "t.*st" - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) - }, - test("script") { - val query = - ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) - val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) - - val expected = - """ - |{ - | "script": { - | "script": { - | "lang": "painless", - | "source": "doc['day_of_week'].value > params['day']", - | "params": { - | "day": 2 - | } - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "script": { - | "script": { - | "source": "doc['day_of_week'].value > 2" - | }, - | "boost": 2.0 - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("startsWith") { - val query = startsWith(TestDocument.stringField, "test") - val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(3.14) - val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue - val queryWithAllParams = startsWith(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse - - val expected = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*", - | "boost": 3.14 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "wildcard": { - | "stringField": { - | "value": "test*", - | "boost": 39.2, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("term") { - val queryString = term(TestDocument.stringField, "test") - val queryBool = term(TestDocument.booleanField, true) - val queryInt = term(TestDocument.intField, 21) - 
val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) - val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue - val queryWithAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse - - val expectedString = - """ - |{ - | "term": { - | "stringField": { - | "value": "test" - | } - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "term": { - | "booleanField": { - | "value": true - | } - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "term": { - | "intField": { - | "value": 21 - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "term": { - | "stringField": { - | "value": "test", - | "boost": 10.21 - | } - | } - |} - |""".stripMargin - - val expectedWithCaseInsensitive = - """ - |{ - | "term": { - | "stringField": { - | "value": "test", - | "case_insensitive": true - | } - | } - |} - |""".stripMargin - - val expectedWithAllParams = - """ - |{ - | "term": { - | "stringField": { - | "value": "test", - | "boost": 3.14, - | "case_insensitive": false - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && - assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) - }, - test("terms") { - val queryString = terms(TestDocument.stringField, "a", "b", "c") - val queryBool = terms(TestDocument.booleanField, true, false) - val queryInt = terms(TestDocument.intField, 1, 2, 3, 4) - val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) - - val expectedString = - """ - |{ - | "terms": { - | "stringField": [ "a", "b", "c" ] - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "terms": { - | "booleanField": [ true, false ] - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "terms": { - | "intField": [ 1, 2, 3, 4 ] - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "terms": { - | "stringField": [ "a", "b", "c" ], - | "boost": 10.21 - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) - }, - test("termsSet") { - val queryString = - termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") - val queryBool = - termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) - val queryInt = termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3) - val queryStringTs = termsSet( - field = TestDocument.stringField, - minimumShouldMatchField = TestDocument.stringField, - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSet( - field = TestDocument.booleanField, - minimumShouldMatchField = TestDocument.booleanField, - terms = true, - false - ) - val queryIntTs = - termsSet(field = TestDocument.intField, minimumShouldMatchField = 
TestDocument.intField, terms = 1, 2, 3) - val queryWithBoost = - termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) - - val expectedString = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_field": "required_matches" - | } - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, false ], - | "minimum_should_match_field": "required_matches" - | } - | } - |} - |""".stripMargin - - val expectedInt = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3], - | "minimum_should_match_field": "required_matches" - | } - | } - |} - |""".stripMargin - - val expectedStringTs = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_field": "stringField" - | } - | } - |} - |""".stripMargin - - val expectedBoolTs = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, false ], - | "minimum_should_match_field": "booleanField" - | } - | } - |} - |""".stripMargin - - val expectedIntTs = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3 ], - | "minimum_should_match_field": "intField" - | } - | } - |} - |""".stripMargin - - val expectedWithBoost = - """ - |{ - | "terms_set": { - | "intField": { - | "terms": [ 1, 2, 3 ], - | "minimum_should_match_field": "required_matches", - | "boost": 10.0 - | } - | } - |} - |""".stripMargin - - assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && - assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && - assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && - assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && - assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && - assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && - assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) - }, - test("termsSetScript") { - val queryString = termsSetScript( - field = "stringField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBool = termsSetScript( - field = "booleanField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryInt = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3, - 4 - ) - val queryStringTs = termsSetScript( - field = TestDocument.stringField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = "a", - "b", - "c" - ) - val queryBoolTs = termsSetScript( - field = TestDocument.booleanField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = true, - false - ) - val queryIntTs = termsSetScript( - field = TestDocument.intField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3, - 4 - ) - val queryWithBoost = termsSetScript( - field = "intField", - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = 1, - 2, - 3, - 4 - ).boost(10.0) - - val expectedString = - """ - |{ - | "terms_set": { - | "stringField": { - | "terms": [ "a", "b", "c" ], - | "minimum_should_match_script": { - | "source": "doc['intField'].value" - | } - | } - | } - |} - |""".stripMargin - - val expectedBool = - """ - |{ - | "terms_set": { - | "booleanField": { - | "terms": [ true, 
- | "minimum_should_match_script": {
- | "source": "doc['intField'].value"
- | }
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedInt =
- """
- |{
- | "terms_set": {
- | "intField": {
- | "terms": [ 1, 2, 3, 4 ],
- | "minimum_should_match_script": {
- | "source": "doc['intField'].value"
- | }
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedStringTs =
- """
- |{
- | "terms_set": {
- | "stringField": {
- | "terms": [ "a", "b", "c" ],
- | "minimum_should_match_script": {
- | "source": "doc['intField'].value"
- | }
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedBoolTs =
- """
- |{
- | "terms_set": {
- | "booleanField": {
- | "terms": [ true, false ],
- | "minimum_should_match_script": {
- | "source": "doc['intField'].value"
- | }
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedIntTs =
- """
- |{
- | "terms_set": {
- | "intField": {
- | "terms": [ 1, 2, 3, 4 ],
- | "minimum_should_match_script": {
- | "source": "doc['intField'].value"
- | }
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedWithBoost =
- """
- |{
- | "terms_set": {
- | "intField": {
- | "terms": [ 1, 2, 3, 4 ],
- | "minimum_should_match_script": {
- | "source": "doc['intField'].value"
- | },
- | "boost": 10.0
- | }
- | }
- |}
- |""".stripMargin
-
- assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) &&
- assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) &&
- assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) &&
- assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) &&
- assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) &&
- assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) &&
- assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson))
- },
- test("wildcard") {
- val query = wildcard(TestDocument.stringField, "[a-zA-Z]+")
- val queryWithBoost = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(3.14)
- val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "[a-zA-Z]+").caseInsensitiveTrue
- val queryWithAllParams = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(39.2).caseInsensitiveFalse
-
- val expected =
- """
- |{
- | "wildcard": {
- | "stringField": {
- | "value": "[a-zA-Z]+"
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedWithBoost =
- """
- |{
- | "wildcard": {
- | "stringField": {
- | "value": "[a-zA-Z]+",
- | "boost": 3.14
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedWithCaseInsensitive =
- """
- |{
- | "wildcard": {
- | "stringField": {
- | "value": "[a-zA-Z]+",
- | "case_insensitive": true
- | }
- | }
- |}
- |""".stripMargin
-
- val expectedWithAllParams =
- """
- |{
- | "wildcard": {
- | "stringField": {
- | "value": "[a-zA-Z]+",
- | "boost": 39.2,
- | "case_insensitive": false
- | }
- | }
- |}
- |""".stripMargin
-
- assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) &&
- assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) &&
- assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) &&
- assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson))
- }
- )
- )
-}
+/*
+ * Copyright 2022 LambdaWorks
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package zio.elasticsearch
+
+import zio.Chunk
+import zio.elasticsearch.ElasticHighlight.highlight
+import zio.elasticsearch.ElasticQuery.{script => _, _}
+import zio.elasticsearch.data.GeoPoint
+import zio.elasticsearch.domain._
+import zio.elasticsearch.query.DistanceType.Plane
+import zio.elasticsearch.query.DistanceUnit.Kilometers
+import zio.elasticsearch.query.FunctionScoreFunction._
+import zio.elasticsearch.query.MultiMatchType._
+import zio.elasticsearch.query.MultiValueMode.Max
+import zio.elasticsearch.query.ValidationMethod.IgnoreMalformed
+import zio.elasticsearch.query._
+import zio.elasticsearch.script.{Painless, Script}
+import zio.elasticsearch.utils._
+import zio.test.Assertion.equalTo
+import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assert}
+
+import java.time.LocalDate
+
+object ElasticQuerySpec extends ZIOSpecDefault {
+ def spec: Spec[TestEnvironment, Any] =
+ suite("ElasticQuery")(
+ suite("constructing")(
+ suite("bool")(
+ test("filter") {
+ val query = filter(matches(TestDocument.stringField, "test"), matches(field = "testField", "test field"))
+ val queryWithBoost =
+ filter(matches(TestDocument.stringField, "test"), matches(TestDocument.intField, 22))
+ .boost(10.21)
+
+ assert(query)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk(
+ Match(field = "stringField", value = "test"),
+ Match(field = "testField", value = "test field")
+ ),
+ must = Chunk.empty,
+ mustNot = Chunk.empty,
+ should = Chunk.empty,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) && assert(queryWithBoost)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk(
+ Match(field = "stringField", value = "test"),
+ Match(field = "intField", value = 22)
+ ),
+ must = Chunk.empty,
+ mustNot = Chunk.empty,
+ should = Chunk.empty,
+ boost = Some(10.21),
+ minimumShouldMatch = None
+ )
+ )
+ )
+ },
+ test("must") {
+ val query = must(matches(TestDocument.stringField, "test"), matches("testField", "test field"))
+ val queryWithBoost =
+ must(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21)
+
+ assert(query)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk.empty,
+ must = Chunk(
+ Match(field = "stringField", value = "test"),
+ Match(field = "testField", value = "test field")
+ ),
+ mustNot = Chunk.empty,
+ should = Chunk.empty,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) && assert(queryWithBoost)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk.empty,
+ must = Chunk(
+ Match(field = "stringField.keyword", value = "test"),
+ Match(field = "intField", value = 22)
+ ),
+ mustNot = Chunk.empty,
+ should = Chunk.empty,
+ boost = Some(10.21),
+ minimumShouldMatch = None
+ )
+ )
+ )
+ },
+ test("mustNot") {
+ val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field"))
+ val queryWithBoost =
+ mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22))
+ .boost(10.21)
+
+ assert(query)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk.empty,
+ must = Chunk.empty,
+ mustNot = Chunk(
+ Match(field = "stringField", value = "test"),
Match(field = "testField", value = "test field") + ), + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22) + ), + should = Chunk.empty, + boost = Some(10.21), + minimumShouldMatch = None + ) + ) + ) + }, + test("should") { + val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) + val queryWithMinimumShouldMatch = should( + matches(TestDocument.stringField.keyword, "test"), + matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).minimumShouldMatch(2) + val queryWithAllParams = should( + matches(TestDocument.stringField.keyword, "test"), + matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).boost(3.14).minimumShouldMatch(2) + + assert(query)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField", value = "test"), + Match(field = "testField", value = "test field") + ), + boost = None, + minimumShouldMatch = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22) + ), + boost = Some(10.21), + minimumShouldMatch = None + ) + ) + ) && assert(queryWithMinimumShouldMatch)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22), + Exists(field = "booleanField", boost = None) + ), + boost = None, + minimumShouldMatch = Some(2) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + Bool[TestDocument]( + filter = Chunk.empty, + must = Chunk.empty, + mustNot = Chunk.empty, + should = Chunk( + Match(field = "stringField.keyword", value = "test"), + Match(field = "intField", value = 22), + Exists(field = "booleanField", boost = None) + ), + boost = Some(3.14), + minimumShouldMatch = Some(2) + ) + ) + ) + }, + test("filter + must + mustNot + should") { + val query1 = + filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) + val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) + .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) + val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) + .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) + .mustNot(matches(TestDocument.intField, 50)) + val queryWithBoost = query1.boost(3.14) + val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) + val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) + + assert(query1)( + equalTo( + Bool[TestDocument]( + filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)), + must = Chunk(Match(field = "booleanField", value = true)), + mustNot = Chunk.empty, + should = Chunk.empty, + boost = None, + minimumShouldMatch = None + ) + ) + ) && + assert(query2)( + equalTo( + Bool[TestDocument]( + filter = 
+ must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)),
+ mustNot = Chunk(
+ Match(field = "doubleField", value = 3.14),
+ Match(field = "testField", value = true),
+ Exists(field = "anotherTestField", boost = None)
+ ),
+ should = Chunk.empty,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(query3)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk.empty,
+ must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)),
+ mustNot = Chunk(Match(field = "intField", value = 50)),
+ should = Chunk(
+ Range(
+ field = "intField",
+ lower = GreaterThan(1),
+ upper = LessThanOrEqualTo(100),
+ boost = None,
+ format = None
+ ),
+ Match(field = "stringField", value = "test")
+ ),
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk(MatchPhrase(field = "stringField", value = "test", boost = None)),
+ must = Chunk(Match(field = "booleanField", value = true)),
+ mustNot = Chunk.empty,
+ should = Chunk.empty,
+ boost = Some(3.14),
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithMinimumShouldMatch)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk.empty,
+ must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)),
+ mustNot = Chunk(
+ Match(field = "doubleField", value = 3.14),
+ Match(field = "testField", value = true),
+ Exists(field = "anotherTestField", boost = None)
+ ),
+ should = Chunk.empty,
+ boost = None,
+ minimumShouldMatch = Some(2)
+ )
+ )
+ ) &&
+ assert(queryWithAllParams)(
+ equalTo(
+ Bool[TestDocument](
+ filter = Chunk.empty,
+ must = Chunk(Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None)),
+ mustNot = Chunk(Match(field = "intField", value = 50)),
+ should = Chunk(
+ Range(
+ field = "intField",
+ lower = GreaterThan(1),
+ upper = LessThanOrEqualTo(100),
+ boost = None,
+ format = None
+ ),
+ Match(field = "stringField", value = "test")
+ ),
+ boost = Some(3.14),
+ minimumShouldMatch = Some(3)
+ )
+ )
+ )
+ }
+ ),
+ test("boosting") {
+ val query = boosting(0.5f, exists("testField"), terms("booleanField", true, false))
+ val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false))
+
+ assert(query)(
+ equalTo(
+ Boosting[Any](
+ negativeBoost = 0.5f,
+ negativeQuery = exists("testField"),
+ positiveQuery = terms("booleanField", true, false)
+ )
+ )
+ ) && assert(queryTs)(
+ equalTo(
+ Boosting[TestDocument](
+ negativeBoost = 0.5f,
+ negativeQuery = exists(TestDocument.stringField),
+ positiveQuery = terms(TestDocument.booleanField, true, false)
+ )
+ )
+ )
+ },
+ test("constantScore") {
+ val query = constantScore(terms("stringField", "a", "b", "c"))
+ val queryTs = constantScore(terms(TestDocument.stringField, "a", "b", "c"))
+ val queryWithBoost = constantScore(terms(TestDocument.stringField, "a", "b", "c")).boost(2.2)
+
+ assert(query)(
+ equalTo(
+ ConstantScore[Any](
+ Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryTs)(
+ equalTo(
+ ConstantScore[TestDocument](
+ Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ ConstantScore[TestDocument](
+ Terms(field = "stringField", values = Chunk("a", "b", "c"), boost = None),
+ boost = Some(2.2)
+ )
+ )
+ )
+ },
+ test("contains") {
+ val query = contains("testField", "test")
+ val queryTs = contains(TestDocument.stringField, "test")
+ val queryWithSuffix = contains(TestDocument.stringField.raw, "test")
+ val queryWithBoost = contains(TestDocument.stringField, "test").boost(10.21)
+ val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue
+ val queryAllParams = contains(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse
+
+ assert(query)(
+ equalTo(Wildcard[Any](field = "testField", value = "*test*", boost = None, caseInsensitive = None))
+ ) &&
+ assert(queryTs)(
+ equalTo(
+ Wildcard[TestDocument](field = "stringField", value = "*test*", boost = None, caseInsensitive = None)
+ )
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ Wildcard[TestDocument](field = "stringField.raw", value = "*test*", boost = None, caseInsensitive = None)
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ Wildcard[TestDocument](
+ field = "stringField",
+ value = "*test*",
+ boost = Some(10.21),
+ caseInsensitive = None
+ )
+ )
+ ) &&
+ assert(queryWithCaseInsensitive)(
+ equalTo(
+ Wildcard[TestDocument](
+ field = "stringField",
+ value = "*test*",
+ boost = None,
+ caseInsensitive = Some(true)
+ )
+ )
+ ) &&
+ assert(queryAllParams)(
+ equalTo(
+ Wildcard[TestDocument](
+ field = "stringField",
+ value = "*test*",
+ boost = Some(3.14),
+ caseInsensitive = Some(false)
+ )
+ )
+ )
+ },
+ test("disjunctionMax") {
+ val query = disjunctionMax(exists("existsField"), ids("1", "2", "3"))
+ val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3"))
+ val queryWithTieBreaker = disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f)
+
+ assert(query)(
+ equalTo(
+ DisjunctionMax[Any](
+ queries =
+ Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))),
+ tieBreaker = None
+ )
+ )
+ ) &&
+ assert(queryTs)(
+ equalTo(
+ DisjunctionMax[TestDocument](
+ queries =
+ Chunk(Exists[Any](field = "stringField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))),
+ tieBreaker = None
+ )
+ )
+ ) &&
+ assert(queryWithTieBreaker)(
+ equalTo(
+ DisjunctionMax[Any](
+ queries =
+ Chunk(Exists[Any](field = "existsField", boost = None), Ids[Any](values = Chunk("1", "2", "3"))),
+ tieBreaker = Some(0.5f)
+ )
+ )
+ )
+ },
+ test("exists") {
+ val query = exists("testField")
+ val queryTs = exists(TestDocument.intField)
+ val queryWithBoost = exists(TestDocument.intField).boost(3)
+
+ assert(query)(equalTo(Exists[Any](field = "testField", boost = None))) &&
+ assert(queryTs)(equalTo(Exists[TestDocument](field = "intField", boost = None))) &&
+ assert(queryWithBoost)(equalTo(Exists[TestDocument](field = "intField", boost = Some(3))))
+
+ },
+ test("functionScore") {
+ val scriptScore = scriptScoreFunction(Script("params.agg1 + params.agg2 > 10"))
+ val weight = weightFunction(10.0)
+ val randomScore = randomScoreFunction()
+ val fieldValue = fieldValueFactor(TestDocument.stringField)
+ val decay = expDecayFunction("field", origin = "11, 12", scale = "2km")
+ val typedDecay = expDecayFunction(TestDocument.intField, origin = "11,12", scale = "2km")
+
+ val fullQuery: FunctionScoreQuery[TestDocument] = functionScore(scriptScore, weight, randomScore)
+ .withFunctions(decay)
+ .withFunctions(fieldValue)
+ .boost(2.0)
+ .boostMode(FunctionScoreBoostMode.Avg)
+ .maxBoost(42)
+ .minScore(32)
+ .query(matches("stringField", "value"))
+ .scoreMode(FunctionScoreScoreMode.Min)
+
+ val queryWithType: FunctionScoreQuery[TestDocument] =
+ functionScore(fieldValue).query(matches(TestDocument.stringField, "value"))
+ val queryTypeShrink: FunctionScoreQuery[TestDocument] =
+ functionScore(scriptScore).query(matches(TestDocument.stringField, "value"))
+ val queryWithoutTypeShrink: FunctionScoreQuery[Any] =
+ functionScore(scriptScore).query(matches("stringField", "value"))
+ val queryWithNewAnyQuery: FunctionScoreQuery[TestDocument] =
+ functionScore(fieldValue).query(matches("stringField", "value"))
+
+ val anyQueryWithNewTypedFunction = functionScore(scriptScore).withFunctions(fieldValue)
+ val anyQueryWithNewAnyFunction = functionScore(scriptScore).withFunctions(weight)
+ val typedQueryWithNewTypedFunction = functionScore(fieldValue).withFunctions(typedDecay)
+ val typedQueryWithNewAnyFunction = functionScore(fieldValue).withFunctions(weight)
+
+ assert(fullQuery)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(
+ scriptScore,
+ weight,
+ randomScore,
+ decay,
+ fieldValue
+ ),
+ boost = Some(2.0),
+ boostMode = Some(FunctionScoreBoostMode.Avg),
+ maxBoost = Some(42.0),
+ minScore = Some(32.0),
+ query = Some(Match("stringField", "value")),
+ scoreMode = Some(FunctionScoreScoreMode.Min)
+ )
+ )
+ ) &&
+ assert(queryTypeShrink)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(scriptScore),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = Some(Match("stringField", "value")),
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(queryWithType)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(fieldValue),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = Some(Match("stringField", "value")),
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(queryWithoutTypeShrink)(
+ equalTo(
+ FunctionScore[Any](
+ functionScoreFunctions = Chunk(scriptScore),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = Some(Match("stringField", "value")),
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(queryWithNewAnyQuery)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(fieldValue),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = Some(Match("stringField", "value")),
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(anyQueryWithNewTypedFunction)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(scriptScore, fieldValue),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = None,
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(anyQueryWithNewAnyFunction)(
+ equalTo(
+ FunctionScore[Any](
+ functionScoreFunctions = Chunk(scriptScore, weight),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = None,
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(typedQueryWithNewTypedFunction)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(fieldValue, typedDecay),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = None,
+ scoreMode = None
+ )
+ )
+ ) &&
+ assert(typedQueryWithNewAnyFunction)(
+ equalTo(
+ FunctionScore[TestDocument](
+ functionScoreFunctions = Chunk(fieldValue, weight),
+ boost = None,
+ boostMode = None,
+ maxBoost = None,
+ minScore = None,
+ query = None,
+ scoreMode = None
+ )
+ )
+ )
+ },
+ test("fuzzy") {
+ val query = fuzzy("stringField", "test")
+ val queryTs = fuzzy(TestDocument.stringField, "test")
+ val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO")
+ val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50)
+ val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3)
+ val queryWithAllParameters =
+ fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50)
+ val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test")
+
+ assert(query)(
+ equalTo(
+ Fuzzy[Any](
+ field = "stringField",
+ value = "test",
+ fuzziness = None,
+ maxExpansions = None,
+ prefixLength = None
+ )
+ )
+ ) &&
+ assert(queryTs)(
+ equalTo(
+ Fuzzy[TestDocument](
+ field = "stringField",
+ value = "test",
+ fuzziness = None,
+ maxExpansions = None,
+ prefixLength = None
+ )
+ )
+ ) &&
+ assert(queryWithFuzzinessAuto)(
+ equalTo(
+ Fuzzy[TestDocument](
+ field = "stringField",
+ value = "test",
+ fuzziness = Some("AUTO"),
+ maxExpansions = None,
+ prefixLength = None
+ )
+ )
+ ) &&
+ assert(queryWithMaxExpansions)(
+ equalTo(
+ Fuzzy[TestDocument](
+ field = "stringField",
+ value = "test",
+ fuzziness = None,
+ maxExpansions = Some(50),
+ prefixLength = None
+ )
+ )
+ ) &&
+ assert(queryWithPrefixLength)(
+ equalTo(
+ Fuzzy[TestDocument](
+ field = "stringField",
+ value = "test",
+ fuzziness = None,
+ maxExpansions = None,
+ prefixLength = Some(3)
+ )
+ )
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ Fuzzy[TestDocument](
+ field = "stringField.raw",
+ value = "test",
+ fuzziness = None,
+ maxExpansions = None,
+ prefixLength = None
+ )
+ )
+ ) &&
+ assert(queryWithAllParameters)(
+ equalTo(
+ Fuzzy[TestDocument](
+ field = "stringField",
+ value = "test",
+ fuzziness = Some("AUTO"),
+ maxExpansions = Some(50),
+ prefixLength = Some(3)
+ )
+ )
+ )
+ },
+ test("geoDistance") {
+ val queryWithHash =
+ geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers))
+ val queryWithPoint =
+ geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers))
+ val queryWithDistanceType =
+ geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane)
+ val queryWithName =
+ geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name")
+ val queryWithValidationMethod =
+ geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod(
+ IgnoreMalformed
+ )
+ val queryWithAllParams =
+ geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers))
+ .validationMethod(IgnoreMalformed)
+ .distanceType(Plane)
+ .name("name")
+
+ assert(queryWithHash)(
+ equalTo(
+ GeoDistance[TestDocument](
+ field = "geoPointField",
+ point = "drm3btev3e86",
+ distance = Distance(200, Kilometers),
+ distanceType = None,
+ queryName = None,
+ validationMethod = None
+ )
+ )
+ ) &&
+ assert(queryWithPoint)(
+ equalTo(
+ GeoDistance[TestDocument](
+ field = "geoPointField",
+ point = "20.0,21.1",
+ distance = Distance(200, Kilometers),
+ distanceType = None,
+ queryName = None,
+ validationMethod = None
+ )
+ )
+ ) && assert(queryWithDistanceType)(
+ equalTo(
+ GeoDistance[TestDocument](
+ field = "geoPointField",
+ point = "20.0,21.1",
+ distance = Distance(200, Kilometers),
+ distanceType = Some(Plane),
+ queryName = None,
+ validationMethod = None
+ )
+ )
+ ) && assert(queryWithName)(
+ equalTo(
+ GeoDistance[TestDocument](
+ field = "geoPointField",
+ point = "20.0,21.1",
+ distance = Distance(200, Kilometers),
+ distanceType = None,
+ queryName = Some("name"),
+ validationMethod = None
+ )
+ )
+ ) && assert(queryWithValidationMethod)(
&& assert(queryWithValidationMethod)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = None, + queryName = None, + validationMethod = Some(IgnoreMalformed) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + GeoDistance[TestDocument]( + field = "geoPointField", + point = "20.0,21.1", + distance = Distance(200, Kilometers), + distanceType = Some(Plane), + queryName = Some("name"), + validationMethod = Some(IgnoreMalformed) + ) + ) + ) + }, + test("geoPolygon") { + val query = + geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) + val queryTs = + geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) + val queryWithName = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") + val queryWithValidationMethod = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) + .validationMethod(IgnoreMalformed) + .name("name") + + assert(query)( + equalTo( + GeoPolygon[Any]( + field = "testField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = None, + validationMethod = None + ) + ) + ) && assert(queryTs)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("drm3btev3e86", "drm3btev3e87"), + queryName = None, + validationMethod = None + ) + ) + ) && assert(queryWithName)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = Some("name"), + validationMethod = None + ) + ) + ) && assert(queryWithValidationMethod)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = None, + validationMethod = Some(IgnoreMalformed) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + GeoPolygon[TestDocument]( + field = "stringField", + points = Chunk("40, -70", "30, -80", "20, -90"), + queryName = Some("name"), + validationMethod = Some(IgnoreMalformed) + ) + ) + ) + }, + test("hasChild") { + val query = hasChild("child", matchAll) + val queryWithIgnoreUnmapped = hasChild("child", matchAll).ignoreUnmappedTrue + val queryWithInnerHits = hasChild("child", matchAll).innerHits + val queryWithMaxChildren = hasChild("child", matchAll).maxChildren(5) + val queryWithMinChildren = hasChild("child", matchAll).minChildren(1) + val queryWithScoreMode = hasChild("child", matchAll).scoreMode(ScoreMode.Avg) + val queryWithAllParams = hasChild("child", matchAll) + .scoreMode(ScoreMode.Avg) + .ignoreUnmappedTrue + .innerHits + .maxChildren(5) + .minChildren(1) + + assert(query)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = None, + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithIgnoreUnmapped)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = Some(true), + innerHitsField = None, + maxChildren = None, + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithInnerHits)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = Some(InnerHits()), + maxChildren = None, + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithMaxChildren)( + equalTo( + HasChild[Any]( + 
childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = Some(5), + minChildren = None, + scoreMode = None + ) + ) + ) && assert(queryWithMinChildren)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = None, + minChildren = Some(1), + scoreMode = None + ) + ) + ) && assert(queryWithScoreMode)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = None, + innerHitsField = None, + maxChildren = None, + minChildren = None, + scoreMode = Some(ScoreMode.Avg) + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + HasChild[Any]( + childType = "child", + query = matchAll, + ignoreUnmapped = Some(true), + innerHitsField = Some(InnerHits()), + maxChildren = Some(5), + minChildren = Some(1), + scoreMode = Some(ScoreMode.Avg) + ) + ) + ) + }, + test("hasParent") { + val query = hasParent("parent", matchAll) + val queryWithBoost = hasParent("parent", matchAll).boost(3) + val queryWithScoreTrue = hasParent("parent", matchAll).withScoreTrue + val queryWithScoreFalse = hasParent("parent", matchAll).withScoreFalse + val queryWithIgnoreUnmappedTrue = hasParent("parent", matchAll).ignoreUnmappedTrue + val queryWithIgnoreUnmappedFalse = hasParent("parent", matchAll).ignoreUnmappedFalse + val queryWithAllParams = hasParent("parent", matchAll).boost(3).ignoreUnmappedFalse.withScoreTrue + + assert(query)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = None, + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithBoost)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = Some(3.0), + ignoreUnmapped = None, + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithScoreTrue)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = None, + innerHitsField = None, + score = Some(true) + ) + ) + ) && assert(queryWithScoreFalse)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = None, + innerHitsField = None, + score = Some(false) + ) + ) + ) && assert(queryWithIgnoreUnmappedTrue)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = Some(true), + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithIgnoreUnmappedFalse)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = None, + ignoreUnmapped = Some(false), + innerHitsField = None, + score = None + ) + ) + ) && assert(queryWithAllParams)( + equalTo( + HasParent[Any]( + parentType = "parent", + query = matchAll, + boost = Some(3.0), + ignoreUnmapped = Some(false), + innerHitsField = None, + score = Some(true) + ) + ) + ) + }, + test("ids") { + val idsQuery = ids("1", "2", "3") + + assert(idsQuery)( + equalTo( + Ids[Any]( + values = Chunk("1", "2", "3") + ) + ) + ) + }, + test("kNN") { + val queryString = kNN("stringField", 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryBool = kNN("boolField", 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryInt = kNN("intField", 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryStringTs = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryBoolTs = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryIntTs = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryWithSimilarity = kNN(TestDocument.stringField, 
+
+ assert(queryString)(
+ equalTo(
+ KNN[Any](
+ field = "stringField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = None
+ )
+ )
+ ) &&
+ assert(queryBool)(
+ equalTo(
+ KNN[Any](
+ field = "boolField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = None
+ )
+ )
+ ) &&
+ assert(queryInt)(
+ equalTo(
+ KNN[Any](
+ field = "intField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = None
+ )
+ )
+ ) &&
+ assert(queryStringTs)(
+ equalTo(
+ KNN[TestDocument](
+ field = "stringField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = None
+ )
+ )
+ ) &&
+ assert(queryBoolTs)(
+ equalTo(
+ KNN[TestDocument](
+ field = "booleanField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = None
+ )
+ )
+ ) &&
+ assert(queryIntTs)(
+ equalTo(
+ KNN[TestDocument](
+ field = "intField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = None
+ )
+ )
+ ) &&
+ assert(queryWithSimilarity)(
+ equalTo(
+ KNN[TestDocument](
+ field = "stringField",
+ k = 5,
+ numCandidates = 10,
+ queryVector = Chunk(1.1, 2.2, 3.3),
+ similarity = Some(3.14)
+ )
+ )
+ )
+ },
+ test("matchAll") {
+ val query = matchAll
+ val queryWithBoost = matchAll.boost(3.14)
+
+ assert(query)(equalTo(MatchAll(boost = None))) && assert(queryWithBoost)(
+ equalTo(MatchAll(boost = Some(3.14)))
+ )
+ },
+ test("matchBooleanPrefix") {
+ val queryString = matchBooleanPrefix("stringField", "test")
+ val queryBool = matchBooleanPrefix("booleanField", true)
+ val queryInt = matchBooleanPrefix("intField", 1)
+ val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test")
+ val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true)
+ val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1)
+ val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test")
+ val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3)
+
+ assert(queryString)(
+ equalTo(MatchBooleanPrefix[Any, String](field = "stringField", value = "test", minimumShouldMatch = None))
+ ) &&
+ assert(queryBool)(
+ equalTo(MatchBooleanPrefix[Any, Boolean](field = "booleanField", value = true, minimumShouldMatch = None))
+ ) &&
+ assert(queryInt)(
+ equalTo(MatchBooleanPrefix[Any, Int](field = "intField", value = 1, minimumShouldMatch = None))
+ ) &&
+ assert(queryStringTs)(
+ equalTo(
+ MatchBooleanPrefix[TestDocument, String](field = "stringField", value = "test", minimumShouldMatch = None)
+ )
+ ) &&
+ assert(queryBoolTs)(
+ equalTo(
+ MatchBooleanPrefix[TestDocument, Boolean](field = "booleanField", value = true, minimumShouldMatch = None)
+ )
+ ) &&
+ assert(queryIntTs)(
+ equalTo(MatchBooleanPrefix[TestDocument, Int](field = "intField", value = 1, minimumShouldMatch = None))
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ MatchBooleanPrefix[TestDocument, String](
+ field = "stringField.raw",
+ value = "test",
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithMinimumShouldMatch)(
+ equalTo(
+ MatchBooleanPrefix[TestDocument, String](
+ field = "stringField",
+ value = "test",
+ minimumShouldMatch = Some(3)
+ )
+ )
+ )
+ },
+ test("matches") {
+ val queryString = matches("stringField", "test")
+ val queryBool = matches("booleanField", true)
+ val queryInt = matches("intField", 1)
+ val queryStringTs = matches(TestDocument.stringField, "test")
+ val queryBoolTs = matches(TestDocument.booleanField, true)
+ val queryIntTs = matches(TestDocument.intField, 1)
+ val queryWithSuffix = matches(TestDocument.stringField.raw, "test")
+ val queryWithBoost = matches(TestDocument.doubleField, 3.14)
+
+ assert(queryString)(equalTo(Match[Any, String](field = "stringField", value = "test"))) &&
+ assert(queryBool)(equalTo(Match[Any, Boolean](field = "booleanField", value = true))) &&
+ assert(queryInt)(equalTo(Match[Any, Int](field = "intField", value = 1))) &&
+ assert(queryStringTs)(
+ equalTo(Match[TestDocument, String](field = "stringField", value = "test"))
+ ) &&
+ assert(queryBoolTs)(
+ equalTo(Match[TestDocument, Boolean](field = "booleanField", value = true))
+ ) &&
+ assert(queryIntTs)(equalTo(Match[TestDocument, Int](field = "intField", value = 1))) &&
+ assert(queryWithSuffix)(
+ equalTo(Match[TestDocument, String](field = "stringField.raw", value = "test"))
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(Match[TestDocument, Double](field = "doubleField", value = 3.14))
+ )
+ },
+ test("matchPhrase") {
+ val query = matchPhrase("stringField", "this is a test")
+ val queryTs = matchPhrase(TestDocument.stringField, "this is a test")
+ val queryWithSuffix = matchPhrase(TestDocument.stringField.raw, "this is a test")
+ val queryWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3)
+
+ assert(query)(equalTo(MatchPhrase[Any](field = "stringField", value = "this is a test", boost = None))) &&
+ assert(queryTs)(
+ equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = None))
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(MatchPhrase[TestDocument](field = "stringField.raw", value = "this is a test", boost = None))
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(MatchPhrase[TestDocument](field = "stringField", value = "this is a test", boost = Some(3)))
+ )
+ },
+ test("matchPhrasePrefix") {
+ val query = matchPhrasePrefix("stringField", "test")
+ val queryTs = matchPhrasePrefix(TestDocument.stringField, "test")
+
+ assert(query)(equalTo(MatchPhrasePrefix[Any](field = "stringField", value = "test"))) &&
+ assert(queryTs)(equalTo(MatchPhrasePrefix[TestDocument](field = "stringField", value = "test")))
+ },
+ test("multiMatch") {
+ val query = multiMatch("this is a test")
+ val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2")
+ val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField)
+ val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw)
+ val queryWithType = multiMatch("this is a test").matchingType(BestFields)
+ val queryWithBoost = multiMatch("this is a test").boost(2.2)
+ val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2)
+ val queryWithAllParams = multiMatch("this is a test")
+ .fields(TestDocument.stringField)
+ .matchingType(BestFields)
+ .boost(2.2)
+ .minimumShouldMatch(2)
+
+ assert(query)(
+ equalTo(
+ MultiMatch[Any](
+ fields = Chunk.empty,
+ value = "this is a test",
+ matchingType = None,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithFields)(
+ equalTo(
+ MultiMatch[Any](
+ fields = Chunk("stringField1", "stringField2"),
+ value = "this is a test",
+ matchingType = None,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithFieldsTs)(
+ equalTo(
+ MultiMatch[TestDocument](
+ fields = Chunk("stringField"),
+ value = "this is a test",
+ matchingType = None,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithFieldsSuffix)(
+ equalTo(
+ MultiMatch[TestDocument](
+ fields = Chunk("stringField.raw"),
+ value = "this is a test",
+ matchingType = None,
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithType)(
+ equalTo(
+ MultiMatch[Any](
+ fields = Chunk.empty,
+ value = "this is a test",
+ matchingType = Some(BestFields),
+ boost = None,
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ MultiMatch[Any](
+ fields = Chunk.empty,
+ value = "this is a test",
+ matchingType = None,
+ boost = Some(2.2),
+ minimumShouldMatch = None
+ )
+ )
+ ) &&
+ assert(queryWithMinimumShouldMatch)(
+ equalTo(
+ MultiMatch[Any](
+ fields = Chunk.empty,
+ value = "this is a test",
+ matchingType = None,
+ boost = None,
+ minimumShouldMatch = Some(2)
+ )
+ )
+ ) &&
+ assert(queryWithAllParams)(
+ equalTo(
+ MultiMatch[TestDocument](
+ fields = Chunk("stringField"),
+ value = "this is a test",
+ matchingType = Some(BestFields),
+ boost = Some(2.2),
+ minimumShouldMatch = Some(2)
+ )
+ )
+ )
+ },
+ test("nested") {
+ val query = nested("testField", matchAll)
+ val queryTs = nested(TestDocument.subDocumentList, matchAll)
+ val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue
+ val queryWithInnerHits =
+ nested(TestDocument.subDocumentList, matchAll).innerHits(InnerHits().from(0).name("innerHitName").size(3))
+ val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits
+ val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg)
+ val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse
+ .innerHits(InnerHits().name("innerHitName"))
+ .scoreMode(ScoreMode.Max)
+
+ assert(query)(
+ equalTo(
+ Nested[Any](
+ path = "testField",
+ query = MatchAll(boost = None),
+ scoreMode = None,
+ ignoreUnmapped = None,
+ innerHitsField = None
+ )
+ )
+ ) &&
+ assert(queryTs)(
+ equalTo(
+ Nested[TestDocument](
+ path = "subDocumentList",
+ query = MatchAll(boost = None),
+ scoreMode = None,
+ ignoreUnmapped = None,
+ innerHitsField = None
+ )
+ )
+ ) &&
+ assert(queryWithIgnoreUnmapped)(
+ equalTo(
+ Nested[TestDocument](
+ path = "subDocumentList",
+ query = MatchAll(boost = None),
+ scoreMode = None,
+ ignoreUnmapped = Some(true),
+ innerHitsField = None
+ )
+ )
+ ) &&
+ assert(queryWithInnerHits)(
+ equalTo(
+ Nested[TestDocument](
+ path = "subDocumentList",
+ query = MatchAll(boost = None),
+ scoreMode = None,
+ ignoreUnmapped = None,
+ innerHitsField = Some(
+ InnerHits(
+ excluded = Chunk(),
+ included = Chunk(),
+ from = Some(0),
+ highlights = None,
+ name = Some("innerHitName"),
+ size = Some(3)
+ )
+ )
+ )
+ )
+ ) &&
+ assert(queryWithInnerHitsEmpty)(
+ equalTo(
+ Nested[TestDocument](
+ path = "subDocumentList",
+ query = MatchAll(boost = None),
+ scoreMode = None,
+ ignoreUnmapped = None,
+ innerHitsField = Some(
+ InnerHits(
+ excluded = Chunk(),
+ included = Chunk(),
+ from = None,
+ highlights = None,
+ name = None,
+ size = None
+ )
+ )
+ )
+ )
+ ) &&
+ assert(queryWithScoreMode)(
+ equalTo(
+ Nested[TestDocument](
+ path = "subDocumentList",
+ query = MatchAll(boost = None),
+ scoreMode = Some(ScoreMode.Avg),
+ ignoreUnmapped = None,
+ innerHitsField = None
+ )
+ )
+ ) &&
+ assert(queryWithAllParams)(
+ equalTo(
+ Nested[TestDocument](
+ path = "subDocumentList",
+ query = MatchAll(boost = None),
+ scoreMode = Some(ScoreMode.Max),
+ ignoreUnmapped = Some(false),
+ innerHitsField = Some(
+ InnerHits(
+ excluded = Chunk(),
+ included = Chunk(),
+ from = None,
+ highlights = None,
+ name = Some("innerHitName"),
+ size = None
+ )
+ )
+ )
+ )
+ )
+ },
+ test("prefix") {
+ val query = prefix("stringField", "test")
+ val queryTs = prefix(TestDocument.stringField, "test")
+ val queryWithSuffix = prefix(TestDocument.stringField.keyword, "test")
+ val queryWithCaseInsensitive = prefix(TestDocument.stringField, "test").caseInsensitiveTrue
+
+ assert(query)(
+ equalTo(Prefix[Any](field = "stringField", value = "test", caseInsensitive = None))
+ ) &&
+ assert(queryTs)(
+ equalTo(Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = None))
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ Prefix[TestDocument](field = "stringField.keyword", value = "test", caseInsensitive = None)
+ )
+ ) &&
+ assert(queryWithCaseInsensitive)(
+ equalTo(
+ Prefix[TestDocument](field = "stringField", value = "test", caseInsensitive = Some(true))
+ )
+ )
+ },
+ test("range") {
+ val query = range("testField")
+ val queryString = range(TestDocument.stringField)
+ val queryInt = range(TestDocument.intField)
+ val queryWithSuffix = range(TestDocument.stringField.suffix("test"))
+ val queryLowerBound = range(TestDocument.doubleField).gt(3.14)
+ val queryUpperBound = range(TestDocument.doubleField).lt(10.21)
+ val queryInclusiveLowerBound = range(TestDocument.intField).gte(10)
+ val queryInclusiveUpperBound = range(TestDocument.intField).lte(21)
+ val queryMixedBounds = queryLowerBound.lte(21.0)
+ val queryWithBoostParam = queryMixedBounds.boost(2.8)
+ val queryWithFormatParam = range(TestDocument.dateField).gt(LocalDate.of(2023, 5, 11)).format("yyyy-MM-dd")
+
+ assert(query)(
+ equalTo(
+ Range[Any, Any, Unbounded.type, Unbounded.type](
+ field = "testField",
+ lower = Unbounded,
+ upper = Unbounded,
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryString)(
+ equalTo(
+ Range[TestDocument, String, Unbounded.type, Unbounded.type](
+ field = "stringField",
+ lower = Unbounded,
+ upper = Unbounded,
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryInt)(
+ equalTo(
+ Range[TestDocument, Int, Unbounded.type, Unbounded.type](
+ field = "intField",
+ lower = Unbounded,
+ upper = Unbounded,
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ Range[TestDocument, String, Unbounded.type, Unbounded.type](
+ field = "stringField.test",
+ lower = Unbounded,
+ upper = Unbounded,
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryLowerBound)(
+ equalTo(
+ Range[TestDocument, Double, GreaterThan[Double], Unbounded.type](
+ field = "doubleField",
+ lower = GreaterThan(3.14),
+ upper = Unbounded,
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryUpperBound)(
+ equalTo(
+ Range[TestDocument, Double, Unbounded.type, LessThan[Double]](
+ field = "doubleField",
+ lower = Unbounded,
+ upper = LessThan(10.21),
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryInclusiveLowerBound)(
+ equalTo(
+ Range[TestDocument, Int, GreaterThanOrEqualTo[Int], Unbounded.type](
+ field = "intField",
+ lower = GreaterThanOrEqualTo(10),
+ upper = Unbounded,
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryInclusiveUpperBound)(
+ equalTo(
+ Range[TestDocument, Int, Unbounded.type, LessThanOrEqualTo[Int]](
+ field = "intField",
+ lower = Unbounded,
+ upper = LessThanOrEqualTo(21),
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryMixedBounds)(
+ equalTo(
+ Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]](
+ field = "doubleField",
+ lower = GreaterThan(3.14),
+ upper = LessThanOrEqualTo(21.0),
+ boost = None,
+ format = None
+ )
+ )
+ ) &&
+ assert(queryWithBoostParam)(
+ equalTo(
+ Range[TestDocument, Double, GreaterThan[Double], LessThanOrEqualTo[Double]](
+ field = "doubleField",
+ lower = GreaterThan(3.14),
+ upper = LessThanOrEqualTo(21),
+ boost = Some(2.8),
+ format = None
+ )
+ )
+ ) &&
+ assert(queryWithFormatParam)(
+ equalTo(
+ Range[TestDocument, LocalDate, GreaterThan[LocalDate], Unbounded.type](
+ field = "dateField",
+ lower = GreaterThan(LocalDate.of(2023, 5, 11)),
+ upper = Unbounded,
+ boost = None,
+ format = Some("yyyy-MM-dd")
+ )
+ )
+ )
+ },
+ test("regexp") {
+ val query = regexp("stringField", "t.*st")
+ val queryTs = regexp(TestDocument.stringField, "t.*st")
+ val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue
+ val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st")
+
+ assert(query)(equalTo(Regexp[Any](field = "stringField", value = "t.*st", caseInsensitive = None))) &&
+ assert(queryTs)(
+ equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = None))
+ ) &&
+ assert(queryWithCaseInsensitive)(
+ equalTo(Regexp[TestDocument](field = "stringField", value = "t.*st", caseInsensitive = Some(true)))
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(Regexp[TestDocument](field = "stringField.raw", value = "t.*st", caseInsensitive = None))
+ )
+ },
+ test("script") {
+ val query =
+ ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless))
+ val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0)
+
+ assert(query)(
+ equalTo(
+ zio.elasticsearch.query.Script(
+ script = Script(
+ source = "doc['day_of_week'].value > params['day']",
+ params = Map("day" -> 2),
+ lang = Some(Painless)
+ ),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ zio.elasticsearch.query.Script(
+ script = Script(
+ source = "doc['day_of_week'].value > 2",
+ params = Map.empty,
+ lang = None
+ ),
+ boost = Some(2.0)
+ )
+ )
+ )
+ },
+ test("startsWith") {
+ val query = startsWith("testField", "test")
+ val queryTs = startsWith(TestDocument.stringField, "test")
+ val queryWithSuffix = startsWith(TestDocument.stringField.raw, "test")
+ val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(10.21)
+ val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue
+ val queryAllParams = startsWith(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse
+
+ assert(query)(
+ equalTo(Wildcard[Any](field = "testField", value = "test*", boost = None, caseInsensitive = None))
+ ) &&
+ assert(queryTs)(
+ equalTo(
+ Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = None)
+ )
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ Wildcard[TestDocument](field = "stringField.raw", value = "test*", boost = None, caseInsensitive = None)
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ Wildcard[TestDocument](
+ field = "stringField",
+ value = "test*",
+ boost = Some(10.21),
+ caseInsensitive = None
+ )
+ )
+ ) &&
+ assert(queryWithCaseInsensitive)(
+ equalTo(
+ Wildcard[TestDocument](field = "stringField", value = "test*", boost = None, caseInsensitive = Some(true))
+ )
+ ) &&
+ assert(queryAllParams)(
+ equalTo(
+ Wildcard[TestDocument](
+ field = "stringField",
value = "test*", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + }, + test("term") { + val queryString = term("stringField", "test") + val queryBool = term("booleanField", true) + val queryInt = term("intField", 1) + val queryStringTs = term(TestDocument.stringField, "test") + val queryBoolTs = term(TestDocument.booleanField, true) + val queryIntTs = term(TestDocument.intField, 1) + val queryWithSuffix = term(TestDocument.stringField.keyword, "test") + val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue + val queryAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + assert(queryString)( + equalTo(Term[Any, String](field = "stringField", value = "test", boost = None, caseInsensitive = None)) + ) && + assert(queryBool)( + equalTo(Term[Any, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None)) + ) && + assert(queryInt)( + equalTo(Term[Any, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) + ) && + assert(queryStringTs)( + equalTo( + Term[TestDocument, String](field = "stringField", value = "test", boost = None, caseInsensitive = None) + ) + ) && + assert(queryBoolTs)( + equalTo( + Term[TestDocument, Boolean](field = "booleanField", value = true, boost = None, caseInsensitive = None) + ) + ) && + assert(queryIntTs)( + equalTo(Term[TestDocument, Int](field = "intField", value = 1, boost = None, caseInsensitive = None)) + ) && + assert(queryWithSuffix)( + equalTo( + Term[TestDocument, String]( + field = "stringField.keyword", + value = "test", + boost = None, + caseInsensitive = None + ) + ) + ) && + assert(queryWithBoost)( + equalTo( + Term[TestDocument, String]( + field = "stringField", + value = "test", + boost = Some(10.21), + caseInsensitive = None + ) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Term[TestDocument, String]( + field = "stringField", + value = "test", + boost = None, + caseInsensitive = Some(true) + ) + ) + ) && + assert(queryAllParams)( + equalTo( + Term[TestDocument, String]( + field = "stringField", + value = "test", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + }, + test("terms") { + val queryString = terms("stringField", "a", "b", "c") + val queryBool = terms("booleanField", true, false) + val queryInt = terms("intField", 1, 2, 3) + val queryStringTs = terms(TestDocument.stringField, "a", "b", "c") + val queryBoolTs = terms(TestDocument.booleanField, true, false) + val queryIntTs = terms(TestDocument.intField, 1, 2, 3) + val queryWithSuffix = terms(TestDocument.stringField.keyword, "a", "b", "c") + val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) + + assert(queryString)( + equalTo(Terms[Any, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) + ) && + assert(queryBool)( + equalTo(Terms[Any, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) + ) && + assert(queryInt)( + equalTo(Terms[Any, Int](field = "intField", values = Chunk(1, 2, 3), boost = None)) + ) && + assert(queryStringTs)( + equalTo(Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = None)) + ) && + assert(queryBoolTs)( + equalTo(Terms[TestDocument, Boolean](field = "booleanField", values = Chunk(true, false), boost = None)) + ) && + assert(queryIntTs)( + equalTo(Terms[TestDocument, Int](field = "intField", values = Chunk(1, 2, 3), 
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ Terms[TestDocument, String](field = "stringField.keyword", values = Chunk("a", "b", "c"), boost = None)
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ Terms[TestDocument, String](field = "stringField", values = Chunk("a", "b", "c"), boost = Some(10.21))
+ )
+ )
+ },
+ test("termsSet") {
+ val queryString =
+ termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c")
+ val queryBool =
+ termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false)
+ val queryInt = termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3)
+ val queryStringTs = termsSet(
+ field = TestDocument.stringField,
+ minimumShouldMatchField = TestDocument.stringField,
+ terms = "a",
+ "b",
+ "c"
+ )
+ val queryBoolTs = termsSet(
+ field = TestDocument.booleanField,
+ minimumShouldMatchField = TestDocument.booleanField,
+ terms = true,
+ false
+ )
+ val queryIntTs =
+ termsSet(field = TestDocument.intField, minimumShouldMatchField = TestDocument.intField, terms = 1, 2, 3)
+ val queryWithSuffix =
+ termsSet(
+ field = TestDocument.stringField.keyword,
+ minimumShouldMatchField = TestDocument.stringField,
+ terms = "a",
+ "b",
+ "c"
+ )
+ val queryWithBoost =
+ termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0)
+
+ assert(queryString)(
+ equalTo(
+ TermsSet[Any, String](
+ field = "stringField",
+ terms = Chunk("a", "b", "c"),
+ minimumShouldMatchField = Some("required_matches"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryBool)(
+ equalTo(
+ TermsSet[Any, Boolean](
+ field = "booleanField",
+ terms = Chunk(true, false),
+ minimumShouldMatchField = Some("required_matches"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryInt)(
+ equalTo(
+ TermsSet[Any, Int](
+ field = "intField",
+ terms = Chunk(1, 2, 3),
+ minimumShouldMatchField = Some("required_matches"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryStringTs)(
+ equalTo(
+ TermsSet[TestDocument, String](
+ field = "stringField",
+ terms = Chunk("a", "b", "c"),
+ minimumShouldMatchField = Some("stringField"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryBoolTs)(
+ equalTo(
+ TermsSet[TestDocument, Boolean](
+ field = "booleanField",
+ terms = Chunk(true, false),
+ minimumShouldMatchField = Some("booleanField"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryIntTs)(
+ equalTo(
+ TermsSet[TestDocument, Int](
+ field = "intField",
+ terms = Chunk(1, 2, 3),
+ minimumShouldMatchField = Some("intField"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ TermsSet[TestDocument, String](
+ field = "stringField.keyword",
+ terms = Chunk("a", "b", "c"),
+ minimumShouldMatchField = Some("stringField"),
+ minimumShouldMatchScript = None,
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ TermsSet[Any, Int](
+ field = "intField",
+ terms = Chunk(1, 2, 3),
+ minimumShouldMatchField = Some("required_matches"),
+ minimumShouldMatchScript = None,
+ boost = Some(10.0)
+ )
+ )
+ )
+ },
+ test("termsSetScript") {
+ val queryString = termsSetScript(
+ field = "stringField",
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = "a",
+ "b",
+ "c"
+ )
+ val queryBool = termsSetScript(
+ field = "booleanField",
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = true,
+ false
+ )
+ val queryInt = termsSetScript(
+ field = "intField",
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = 1,
+ 2,
+ 3
+ )
+ val queryStringTs = termsSetScript(
+ field = TestDocument.stringField,
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = "a",
+ "b",
+ "c"
+ )
+ val queryBoolTs = termsSetScript(
+ field = TestDocument.booleanField,
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = true,
+ false
+ )
+ val queryIntTs = termsSetScript(
+ field = TestDocument.intField,
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = 1,
+ 2,
+ 3
+ )
+ val queryWithSuffix =
+ termsSetScript(
+ field = TestDocument.stringField.keyword,
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = "a",
+ "b",
+ "c"
+ )
+ val queryWithBoost = termsSetScript(
+ field = "intField",
+ minimumShouldMatchScript = Script("doc['intField'].value"),
+ terms = 1,
+ 2,
+ 3
+ ).boost(10.0)
+
+ assert(queryString)(
+ equalTo(
+ TermsSet[Any, String](
+ field = "stringField",
+ terms = Chunk("a", "b", "c"),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryBool)(
+ equalTo(
+ TermsSet[Any, Boolean](
+ field = "booleanField",
+ terms = Chunk(true, false),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryInt)(
+ equalTo(
+ TermsSet[Any, Int](
+ field = "intField",
+ terms = Chunk(1, 2, 3),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryStringTs)(
+ equalTo(
+ TermsSet[TestDocument, String](
+ field = "stringField",
+ terms = Chunk("a", "b", "c"),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryBoolTs)(
+ equalTo(
+ TermsSet[TestDocument, Boolean](
+ field = "booleanField",
+ terms = Chunk(true, false),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryIntTs)(
+ equalTo(
+ TermsSet[TestDocument, Int](
+ field = "intField",
+ terms = Chunk(1, 2, 3),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryWithSuffix)(
+ equalTo(
+ TermsSet[TestDocument, String](
+ field = "stringField.keyword",
+ terms = Chunk("a", "b", "c"),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = None
+ )
+ )
+ ) &&
+ assert(queryWithBoost)(
+ equalTo(
+ TermsSet[Any, Int](
+ field = "intField",
+ terms = Chunk(1, 2, 3),
+ minimumShouldMatchField = None,
+ minimumShouldMatchScript = Some(Script("doc['intField'].value")),
+ boost = Some(10.0)
+ )
+ )
+ )
+ },
+ test("wildcard") {
+ val query = wildcard("testField", "test")
+ val queryTs = wildcard(TestDocument.stringField, "test")
+ val queryWithSuffix = wildcard(TestDocument.stringField.raw, "test")
+ val queryWithBoost = wildcard(TestDocument.stringField, "test").boost(10.21)
+ val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "test").caseInsensitiveTrue
+ val queryAllParams = wildcard(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse
+
+ assert(query)(
equalTo(Wildcard[Any](field = "testField", value = "test", boost = None, caseInsensitive = None)) + ) && + assert(queryTs)( + equalTo(Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = None)) + ) && + assert(queryWithSuffix)( + equalTo( + Wildcard[TestDocument](field = "stringField.raw", value = "test", boost = None, caseInsensitive = None) + ) + ) && + assert(queryWithBoost)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "test", boost = Some(10.21), caseInsensitive = None) + ) + ) && + assert(queryWithCaseInsensitive)( + equalTo( + Wildcard[TestDocument](field = "stringField", value = "test", boost = None, caseInsensitive = Some(true)) + ) + ) && + assert(queryAllParams)( + equalTo( + Wildcard[TestDocument]( + field = "stringField", + value = "test", + boost = Some(3.14), + caseInsensitive = Some(false) + ) + ) + ) + } + ), + suite("encoding as JSON")( + suite("bool")( + test("filter") { + val query = filter(matches(TestDocument.doubleField, 39.2)) + val queryWithBoost = filter(matches(TestDocument.booleanField, true)).boost(3.14) + + val expected = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match": { + | "doubleField": 39.2 + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match": { + | "booleanField": true + | } + | } + | ], + | "boost": 3.14 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("must") { + val query = must(matchPhrase(TestDocument.stringField, "test")) + val queryWithBoost = must(terms(TestDocument.stringField, "a", "b", "c")).boost(3.14) + + val expected = + """ + |{ + | "bool": { + | "must": [ + | { + | "match_phrase": { + | "stringField": "test" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "boost": 3.14 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("mustNot") { + val query = mustNot(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + mustNot(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)) + .boost(10.21) + + val expected = + """ + |{ + | "bool": { + | "must_not": [ + | { + | "match": { + | "stringField": "test" + | } + | }, + | { + | "match": { + | "testField": "test field" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "must_not": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | } + | ], + | "boost": 10.21 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("should") { + val query = should(matches(TestDocument.stringField, "test"), matches("testField", "test field")) + val queryWithBoost = + should(matches(TestDocument.stringField.keyword, "test"), matches(TestDocument.intField, 22)).boost(10.21) + val queryWithMinimumShouldMatch = should( + matches(TestDocument.stringField.keyword, "test"), + 
matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).minimumShouldMatch(2) + val queryWithAllParams = should( + matches(TestDocument.stringField.keyword, "test"), + matches(TestDocument.intField, 22), + exists(TestDocument.booleanField) + ).boost(3.14).minimumShouldMatch(2) + + val expected = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField": "test" + | } + | }, + | { + | "match": { + | "testField": "test field" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | } + | ], + | "boost": 10.21 + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | }, + | { + | "exists": { + | "field": "booleanField" + | } + | } + | ], + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "bool": { + | "should": [ + | { + | "match": { + | "stringField.keyword": "test" + | } + | }, + | { + | "match": { + | "intField": 22 + | } + | }, + | { + | "exists": { + | "field": "booleanField" + | } + | } + | ], + | "boost": 3.14, + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("filter + must + mustNot + should") { + val query1 = + filter(matchPhrase(TestDocument.stringField, "test")).must(matches(TestDocument.booleanField, true)) + val query2 = must(terms(TestDocument.stringField, "a", "b", "c")) + .mustNot(matches(TestDocument.doubleField, 3.14), matches("testField", true), exists("anotherTestField")) + val query3 = must(terms(TestDocument.stringField, "a", "b", "c")) + .should(range(TestDocument.intField).gt(1).lte(100), matches(TestDocument.stringField, "test")) + .mustNot(matches(TestDocument.intField, 50)) + val queryWithBoost = query1.boost(3.14) + val queryWithMinimumShouldMatch = query2.minimumShouldMatch(2) + val queryWithAllParams = query3.boost(3.14).minimumShouldMatch(3) + + val expected1 = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match_phrase": { + | "stringField": "test" + | } + | } + | ], + | "must": [ + | { + | "match": { + | "booleanField": true + | } + | } + | ] + | } + |} + |""".stripMargin + + val expected2 = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "doubleField": 3.14 + | } + | }, + | { + | "match": { + | "testField": true + | } + | }, + | { + | "exists": { + | "field": "anotherTestField" + | } + | } + | ] + | } + |} + |""".stripMargin + + val expected3 = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "intField": 50 + | } + | } + | ], + | "should": [ + | { + | "range": { + | "intField": { + | "gt": 1, + | "lte": 100 + | } + | } + | }, + | { + | "match": { + | "stringField": "test" + | } + | } + | ] + | } + |} + 
|""".stripMargin + + val expectedWithBoost = + """ + |{ + | "bool": { + | "filter": [ + | { + | "match_phrase": { + | "stringField": "test" + | } + | } + | ], + | "must": [ + | { + | "match": { + | "booleanField": true + | } + | } + | ], + | "boost": 3.14 + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "doubleField": 3.14 + | } + | }, + | { + | "match": { + | "testField": true + | } + | }, + | { + | "exists": { + | "field": "anotherTestField" + | } + | } + | ], + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "bool": { + | "must": [ + | { + | "terms": { + | "stringField": ["a", "b", "c"] + | } + | } + | ], + | "must_not": [ + | { + | "match": { + | "intField": 50 + | } + | } + | ], + | "should": [ + | { + | "range": { + | "intField": { + | "gt": 1, + | "lte": 100 + | } + | } + | }, + | { + | "match": { + | "stringField": "test" + | } + | } + | ], + | "boost": 3.14, + | "minimum_should_match": 3 + | } + |} + |""".stripMargin + + assert(query1.toJson(fieldPath = None))(equalTo(expected1.toJson)) && + assert(query2.toJson(fieldPath = None))(equalTo(expected2.toJson)) && + assert(query3.toJson(fieldPath = None))(equalTo(expected3.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + } + ), + test("boosting") { + val query = boosting(0.5f, exists("stringField"), terms("booleanField", true, false)) + val queryTs = boosting(0.5f, exists(TestDocument.stringField), terms(TestDocument.booleanField, true, false)) + + val expected = + """ + |{ + | "boosting": { + | "positive": { + | "terms": { + | "booleanField": [ true, false ] + | } + | }, + | "negative": { + | "exists": { + | "field": "stringField" + | } + | }, + | "negative_boost": 0.5 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("constantScore") { + val query = constantScore(matchPhrase("stringField", "test")) + val queryTs = constantScore(matchPhrase(TestDocument.stringField, "test")) + val queryWithBoost = constantScore(matchPhrase(TestDocument.stringField, "test")).boost(1.5) + + val expected = + """ + |{ + | "constant_score": { + | "filter": { + | "match_phrase": { + | "stringField": "test" + | } + | } + | } + |} + |""".stripMargin + val expectedWithBoost = + """ + |{ + | "constant_score": { + | "filter": { + | "match_phrase": { + | "stringField": "test" + | } + | }, + | "boost": 1.5 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("contains") { + val query = contains(TestDocument.stringField, "test") + val queryWithBoost = contains(TestDocument.stringField, "test").boost(3.14) + val queryWithCaseInsensitive = contains(TestDocument.stringField, "test").caseInsensitiveTrue + val queryWithAllParams = contains(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse + + val expected = + """ + 
|{ + | "wildcard": { + | "stringField": { + | "value": "*test*" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "*test*", + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "*test*", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "*test*", + | "boost": 39.2, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("disjunctionMax") { + val query = disjunctionMax(exists("existsField"), ids("1", "2", "3")) + val queryTs = disjunctionMax(exists(TestDocument.stringField), ids("1", "2", "3")) + val queryWithTieBreaker = + disjunctionMax(exists("existsField"), ids("1", "2", "3")).tieBreaker(0.5f) + + val expected = + """ + |{ + | "dis_max": { + | "queries": [ + | { "exists": { "field": "existsField" } }, + | { "ids": { "values": ["1", "2", "3"] } } + | ] + | } + |} + |""".stripMargin + + val expectedTs = + """ + |{ + | "dis_max": { + | "queries": [ + | { "exists": { "field": "stringField" } }, + | { "ids": { "values": ["1", "2", "3"] } } + | ] + | } + |} + |""".stripMargin + + val expectedWithTieBreaker = + """ + |{ + | "dis_max": { + | "queries": [ + | { "exists": { "field": "existsField" } }, + | { "ids": { "values": ["1", "2", "3"] } } + | ], + | "tie_breaker": 0.5 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && + assert(queryWithTieBreaker.toJson(fieldPath = None))(equalTo(expectedWithTieBreaker.toJson)) + }, + test("exists") { + val query = exists("testField") + val queryTs = exists(TestDocument.dateField) + val queryTsWithBoost = exists(TestDocument.dateField).boost(3) + + val expected = + """ + |{ + | "exists": { + | "field": "testField" + | } + |} + |""".stripMargin + + val expectedTs = + """ + |{ + | "exists": { + | "field": "dateField" + | } + |} + |""".stripMargin + + val expectedTsWithBoost = + """ + |{ + | "exists": { + | "field": "dateField", + | "boost": 3.0 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && + assert(queryTsWithBoost.toJson(fieldPath = None))(equalTo(expectedTsWithBoost.toJson)) + }, + test("functionScore") { + val query = functionScore( + scriptScoreFunction(Script("params.agg1 + params.agg2 > 10")), + randomScoreFunction().weight(2.0), + expDecayFunction("field", origin = "2013-09-17", scale = "10d") + .offset("5d") + .multiValueMode(Max) + .weight(10.0) + ) + .boost(2.0) + .boostMode(FunctionScoreBoostMode.Avg) + .maxBoost(42) + .minScore(32) + .query(matches("stringField", "string")) + .scoreMode(FunctionScoreScoreMode.Min) + + val expected = + """ + |{ + | "function_score": { + | "query" : { "match": { "stringField" : "string" } }, + | "score_mode": "min", + | "boost": 2.0, + | "boost_mode": "avg", + | "max_boost": 42.0, + | "min_score": 32.0, + 
| "functions": [ + | { + | "script_score": { + | "script": { + | "source": "params.agg1 + params.agg2 > 10" + | } + | } + | }, + | { + | "random_score": {}, + | "weight": 2.0 + | }, + | { + | "exp": { + | "field": { + | "origin": "2013-09-17", + | "scale": "10d", + | "offset": "5d" + | }, + | "multi_value_mode": "max" + | }, + | "weight": 10.0 + | } + | ] + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("fuzzy") { + val query = fuzzy("stringField", "test") + val queryTs = fuzzy(TestDocument.stringField, "test") + val queryWithFuzzinessAuto = fuzzy(TestDocument.stringField, "test").fuzziness("AUTO") + val queryWithMaxExpansions = fuzzy(TestDocument.stringField, "test").maxExpansions(50) + val queryWithPrefixLength = fuzzy(TestDocument.stringField, "test").prefixLength(3) + val queryWithAllParameters = + fuzzy(TestDocument.stringField, "test").prefixLength(3).fuzziness("AUTO").maxExpansions(50) + val queryWithSuffix = fuzzy(TestDocument.stringField.raw, "test") + + val expected = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + val expectedWithFuzzinessAuto = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "fuzziness": "AUTO" + | } + | } + |} + |""".stripMargin + + val expectedWithMaxExpansions = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "max_expansions": 50 + | } + | } + |} + |""".stripMargin + + val expectedWithPrefixLength = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "prefix_length": 3 + | } + | } + |} + |""".stripMargin + + val expectedWithAllParameters = + """ + |{ + | "fuzzy": { + | "stringField": { + | "value": "test", + | "fuzziness": "AUTO", + | "max_expansions": 50, + | "prefix_length": 3 + | } + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "fuzzy": { + | "stringField.raw": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithFuzzinessAuto.toJson(fieldPath = None))(equalTo(expectedWithFuzzinessAuto.toJson)) && + assert(queryWithMaxExpansions.toJson(fieldPath = None))(equalTo(expectedWithMaxExpansions.toJson)) && + assert(queryWithPrefixLength.toJson(fieldPath = None))(equalTo(expectedWithPrefixLength.toJson)) && + assert(queryWithAllParameters.toJson(fieldPath = None))(equalTo(expectedWithAllParameters.toJson)) && + assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) + }, + test("geoDistance") { + val queryWithHash = + geoDistance(TestDocument.geoPointField, GeoHash("drm3btev3e86"), Distance(200, Kilometers)) + val queryWithPoint = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) + val queryWithDistanceType = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).distanceType(Plane) + val queryWithName = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).name("name") + val queryWithValidationMethod = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = + geoDistance(TestDocument.geoPointField, GeoPoint(20.0, 21.1), Distance(200, Kilometers)) + .validationMethod(IgnoreMalformed) + .distanceType(Plane) + .name("name") + + val expectedWithHash 
= + """ + |{ + | "geo_distance": { + | "geoPointField": "drm3btev3e86", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithDistance = + """ + |{ + | "geo_distance": { + | "distance": "200.0km", + | "geoPointField": "20.0,21.1" + | } + |} + |""".stripMargin + + val expectedWithDistanceType = + """ + |{ + | "geo_distance": { + | "distance_type" : "plane", + | "geoPointField": "20.0,21.1", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithName = + """ + |{ + | "geo_distance": { + | "_name": "name", + | "geoPointField": "20.0,21.1", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithValidationMethod = + """ + |{ + | "geo_distance": { + | "validation_method": "IGNORE_MALFORMED", + | "geoPointField": "20.0,21.1", + | "distance": "200.0km" + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "geo_distance": { + | "validation_method": "IGNORE_MALFORMED", + | "distance_type" : "plane", + | "_name": "name", + | "distance": "200.0km", + | "geoPointField": "20.0,21.1" + | } + |} + |""".stripMargin + + assert(queryWithHash.toJson(fieldPath = None))(equalTo(expectedWithHash.toJson)) && + assert(queryWithPoint.toJson(fieldPath = None))(equalTo(expectedWithDistance.toJson)) && + assert(queryWithDistanceType.toJson(fieldPath = None))(equalTo(expectedWithDistanceType.toJson)) && + assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && + assert(queryWithValidationMethod.toJson(fieldPath = None))(equalTo(expectedWithValidationMethod.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("geoPolygon") { + val query = + geoPolygon("testField", Chunk("40, -70", "30, -80", "20, -90")) + val queryTs = + geoPolygon(TestDocument.stringField, Chunk("drm3btev3e86", "drm3btev3e87")) + val queryWithName = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).name("name") + val queryWithValidationMethod = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")).validationMethod( + IgnoreMalformed + ) + val queryWithAllParams = + geoPolygon(TestDocument.stringField, Chunk("40, -70", "30, -80", "20, -90")) + .validationMethod(IgnoreMalformed) + .name("name") + + val expected = + """ + |{ + | "geo_polygon": { + | "testField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + val expectedTs = + """ + |{ + | "geo_polygon": { + | "stringField": { + | "points": ["drm3btev3e86", "drm3btev3e87"] + | } + | } + |} + |""".stripMargin + + val expectedWithName = + """ + |{ + | "geo_polygon": { + | "_name": "name", + | "stringField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + val expectedWithValidationMethod = + """ + |{ + | "geo_polygon": { + | "validation_method": "IGNORE_MALFORMED", + | "stringField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "geo_polygon": { + | "validation_method": "IGNORE_MALFORMED", + | "_name": "name", + | "stringField": { + | "points": ["40, -70", "30, -80", "20, -90"] + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expectedTs.toJson)) && + assert(queryWithName.toJson(fieldPath = None))(equalTo(expectedWithName.toJson)) && + assert(queryWithValidationMethod.toJson(fieldPath = 
None))(equalTo(expectedWithValidationMethod.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("hasChild") { + val query = hasChild("child", matches(TestDocument.stringField, "test")) + val queryWithIgnoreUnmapped = hasChild("child", matches("field", "value")).ignoreUnmappedTrue + val queryWithInnerHits = hasChild("child", matches("field", "value")).innerHits + val queryWithMaxChildren = hasChild("child", matches("field", "value")).maxChildren(5) + val queryWithMinChildren = hasChild("child", matches("field", "value")).minChildren(1) + val queryWithScoreMode = hasChild("child", matches("field", "value")).scoreMode(ScoreMode.Avg) + val queryWithAllParams = hasChild("child", matches("field", "value")) + .scoreMode(ScoreMode.Avg) + .ignoreUnmappedTrue + .innerHits + .maxChildren(5) + .minChildren(1) + + val expected = + """ + |{ + | "has_child": { + | "type": "child", + | "query": { + | "match": { + | "stringField" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithIgnoreUnmapped = + """ + |{ + | "has_child": { + | "type": "child", + | "ignore_unmapped": true, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithInnerHits = + """ + |{ + | "has_child": { + | "type": "child", + | "inner_hits": {}, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithMaxChildren = + """ + |{ + | "has_child": { + | "type": "child", + | "max_children": 5, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithMinChildren = + """ + |{ + | "has_child": { + | "type": "child", + | "min_children": 1, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithScoreMode = + """ + |{ + | "has_child": { + | "type": "child", + | "score_mode": "avg", + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "has_child": { + | "type": "child", + | "score_mode": "avg", + | "ignore_unmapped": true, + | "inner_hits": {}, + | "max_children": 5, + | "min_children": 1, + | "query": { + | "match": { + | "field" : "value" + | } + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && + assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && + assert(queryWithMaxChildren.toJson(fieldPath = None))(equalTo(expectedWithMaxChildren.toJson)) && + assert(queryWithMinChildren.toJson(fieldPath = None))(equalTo(expectedWithMinChildren.toJson)) && + assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("hasParent") { + val query = hasParent("parent", matches(TestDocument.stringField, "test")) + val queryWithBoost = hasParent("parent", matches(TestDocument.stringField, "test")).boost(3) + val queryWithScore = hasParent("parent", matches("field", "test")).withScoreFalse + val queryWithIgnoreUnmapped = hasParent("parent", matches("field", "test")).ignoreUnmappedFalse + val queryWithScoreAndIgnoreUnmapped = + hasParent("parent", matches("field", "test")).withScoreTrue.ignoreUnmappedTrue + val queryWithInnerHits = 
hasParent("parent", matches("field", "test")).innerHits + val queryWithAllParams = hasParent("parent", matches(TestDocument.stringField, "test")) + .boost(3) + .withScoreFalse + .ignoreUnmappedFalse + .innerHits + val expected = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "stringField" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "stringField" : "test" + | } + | }, + | "boost": 3.0 + | } + |} + |""".stripMargin + + val expectedWithScore = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "score": false, + | "query": { + | "match": { + | "field" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithIgnoreUnmapped = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "ignore_unmapped": false, + | "query": { + | "match": { + | "field" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithScoreAndIgnoreUnmapped = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "score": true, + | "ignore_unmapped": true, + | "query": { + | "match": { + | "field" : "test" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithInnerHits = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "field" : "test" + | } + | }, + | "inner_hits": {} + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "has_parent": { + | "parent_type": "parent", + | "query": { + | "match": { + | "stringField" : "test" + | } + | }, + | "boost": 3.0, + | "ignore_unmapped": false, + | "score": false, + | "inner_hits": {} + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithScore.toJson(fieldPath = None))(equalTo(expectedWithScore.toJson)) && + assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && + assert(queryWithScoreAndIgnoreUnmapped.toJson(fieldPath = None))( + equalTo(expectedWithScoreAndIgnoreUnmapped.toJson) + ) && + assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("ids") { + val query = ids("1", "2", "3") + + val expected = + """ + |{ + | "ids": { + | "values": ["1", "2", "3"] + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("kNN") { + val queryString = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryBool = kNN(TestDocument.booleanField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryInt = kNN(TestDocument.intField, 5, 10, Chunk(1.1, 2.2, 3.3)) + val queryWithSimilarity = kNN(TestDocument.stringField, 5, 10, Chunk(1.1, 2.2, 3.3)).similarity(3.14) + + val expectedString = + """ + |{ + | "field": "stringField", + | "query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10 + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "field": "booleanField", + | "query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10 + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "field": "intField", + | "query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10 + |} + |""".stripMargin + + val expectedWithSimilarity = + """ + |{ + | "field": "stringField", + | 
"query_vector": [1.1, 2.2, 3.3], + | "k": 5, + | "num_candidates": 10, + | "similarity": 3.14 + |} + |""".stripMargin + + assert(queryString.toJson)(equalTo(expectedString.toJson)) && + assert(queryBool.toJson)(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson)(equalTo(expectedInt.toJson)) && + assert(queryWithSimilarity.toJson)(equalTo(expectedWithSimilarity.toJson)) + }, + test("matchAll") { + val query = matchAll + val queryWithBoost = matchAll.boost(3.14) + + val expected = + """ + |{ + | "match_all": {} + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "match_all": { + | "boost": 3.14 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("matchBooleanPrefix") { + val queryString = matchBooleanPrefix("stringField", "test") + val queryBool = matchBooleanPrefix("booleanField", true) + val queryInt = matchBooleanPrefix("intField", 1) + val queryStringTs = matchBooleanPrefix(TestDocument.stringField, "test") + val queryBoolTs = matchBooleanPrefix(TestDocument.booleanField, true) + val queryIntTs = matchBooleanPrefix(TestDocument.intField, 1) + val queryWithSuffix = matchBooleanPrefix(TestDocument.stringField.raw, "test") + val queryWithMinimumShouldMatch = matchBooleanPrefix(TestDocument.stringField, "test").minimumShouldMatch(3) + + val expectedString = + """ + |{ + | "match_bool_prefix": { + | "stringField": "test" + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "match_bool_prefix": { + | "booleanField": true + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "match_bool_prefix": { + | "intField": 1 + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "match_bool_prefix": { + | "stringField.raw": "test" + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "match_bool_prefix": { + | "stringField": { + | "query": "test", + | "minimum_should_match": 3 + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))(equalTo(expectedWithMinimumShouldMatch.toJson)) + }, + test("matches") { + val query = matches("testField", true) + val queryTsInt = matches(TestDocument.intField, 39) + val queryTsString = matches(TestDocument.stringField, "test") + + val expected = + """ + |{ + | "match": { + | "testField": true + | } + |} + |""".stripMargin + + val expectedTsInt = + """ + |{ + | "match": { + | "intField": 39 + | } + |} + |""".stripMargin + + val expectedTsString = + """ + |{ + | "match": { + | "stringField": "test" + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTsInt.toJson(fieldPath = None))(equalTo(expectedTsInt.toJson)) && + 
assert(queryTsString.toJson(fieldPath = None))(equalTo(expectedTsString.toJson)) + }, + test("matchPhrase") { + val querySimple = matchPhrase("stringField", "this is a test") + val queryRaw = matchPhrase("stringField.raw", "this is a test") + val querySimpleTs = matchPhrase(TestDocument.stringField, "this is a test") + val queryRawTs = matchPhrase(TestDocument.stringField.raw, "this is a test") + val querySimpleTsWithBoost = matchPhrase(TestDocument.stringField, "this is a test").boost(3) + + val expectedSimple = + """ + |{ + | "match_phrase": { + | "stringField": "this is a test" + | } + |} + |""".stripMargin + + val expectedRaw = + """ + |{ + | "match_phrase": { + | "stringField.raw": "this is a test" + | } + |} + |""".stripMargin + + val expectedSimpleTsWithBoost = + """ + |{ + | "match_phrase": { + | "stringField": { + | "query": "this is a test", + | "boost": 3.0 + | } + | } + |} + |""".stripMargin + + assert(querySimple.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && + assert(querySimpleTs.toJson(fieldPath = None))(equalTo(expectedSimple.toJson)) && + assert(queryRaw.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && + assert(queryRawTs.toJson(fieldPath = None))(equalTo(expectedRaw.toJson)) && + assert(querySimpleTsWithBoost.toJson(fieldPath = None))(equalTo(expectedSimpleTsWithBoost.toJson)) + }, + test("matchPhrasePrefix") { + val query = matchPhrasePrefix("stringField", "test") + val queryTs = matchPhrasePrefix(TestDocument.stringField, "test") + + val expected = + """ + |{ + | "match_phrase_prefix": { + | "stringField": { + | "query" : "test" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) + }, + test("multiMatch") { + val query = multiMatch("this is a test") + val queryWithFields = multiMatch("this is a test").fields("stringField1", "stringField2") + val queryWithFieldsTs = multiMatch("this is a test").fields(TestDocument.stringField) + val queryWithFieldsSuffix = multiMatch("this is a test").fields(TestDocument.stringField.raw) + val queryWithType = multiMatch("this is a test").matchingType(BestFields) + val queryWithBoost = multiMatch("this is a test").boost(2.2) + val queryWithMinimumShouldMatch = multiMatch("this is a test").minimumShouldMatch(2) + val queryWithAllParams = multiMatch("this is a test") + .fields(TestDocument.stringField) + .matchingType(BestFields) + .boost(2.2) + .minimumShouldMatch(2) + + val expected = + """ + |{ + | "multi_match": { + | "query": "this is a test" + | } + |} + |""".stripMargin + + val expectedWithFields = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "fields": [ "stringField1", "stringField2" ] + | } + |} + |""".stripMargin + + val expectedWithFieldsTs = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "fields": [ "stringField" ] + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "fields": [ "stringField.raw" ] + | } + |} + |""".stripMargin + + val expectedWithType = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "type": "best_fields" + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "boost": 2.2 + | } + |} + |""".stripMargin + + val expectedWithMinimumShouldMatch = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + 
+ val expectedWithAllParams = + """ + |{ + | "multi_match": { + | "query": "this is a test", + | "type": "best_fields", + | "fields": [ "stringField" ], + | "boost": 2.2, + | "minimum_should_match": 2 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithFields.toJson(fieldPath = None))(equalTo(expectedWithFields.toJson)) && + assert(queryWithFieldsTs.toJson(fieldPath = None))(equalTo(expectedWithFieldsTs.toJson)) && + assert(queryWithFieldsSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) && + assert(queryWithType.toJson(fieldPath = None))(equalTo(expectedWithType.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithMinimumShouldMatch.toJson(fieldPath = None))( + equalTo(expectedWithMinimumShouldMatch.toJson) + ) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("nested") { + val query = nested(TestDocument.subDocumentList, matchAll) + val queryWithNested = nested(TestDocument.subDocumentList, nested("items", term("testField", "test"))) + val queryWithIgnoreUnmapped = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedTrue + val queryWithInnerHits = + nested(TestDocument.subDocumentList, matchAll).innerHits( + InnerHits() + .from(0) + .size(3) + .name("innerHitName") + .highlights(highlight("stringField")) + .excludes("longField") + .includes("intField") + ) + val queryWithInnerHitsEmpty = nested(TestDocument.subDocumentList, matchAll).innerHits + val queryWithScoreMode = nested(TestDocument.subDocumentList, matchAll).scoreMode(ScoreMode.Avg) + val queryWithAllParams = nested(TestDocument.subDocumentList, matchAll).ignoreUnmappedFalse + .innerHits(InnerHits().from(10).size(20).name("innerHitName")) + .scoreMode(ScoreMode.Min) + + val expected = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | } + | } + |} + |""".stripMargin + + val expectedWithNested = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "nested": { + | "path": "subDocumentList.items", + | "query": { + | "term": { + | "subDocumentList.items.testField": { + | "value": "test" + | } + | } + | } + | } + | } + | } + |} + |""".stripMargin + + val expectedWithIgnoreUnmapped = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "ignore_unmapped": true + | } + |} + |""".stripMargin + + val expectedWithInnerHits = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "inner_hits": { + | "from": 0, + | "size": 3, + | "name": "innerHitName", + | "highlight" : { + | "fields" : { + | "subDocumentList.stringField" : {} + | } + | }, + | "_source" : { + | "includes" : [ + | "intField" + | ], + | "excludes" : [ + | "longField" + | ] + | } + | } + | } + |} + |""".stripMargin + + val expectedWithInnerHitsEmpty = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "inner_hits": {} + | } + |} + |""".stripMargin + + val expectedWithScoreMode = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "score_mode": "avg" + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "nested": { + | "path": "subDocumentList", + | "query": { + | "match_all": {} + | }, + | "ignore_unmapped": false, + | "score_mode": "min", + | "inner_hits": { + | "from": 
10, + | "size": 20, + | "name": "innerHitName" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithNested.toJson(fieldPath = None))(equalTo(expectedWithNested.toJson)) && + assert(queryWithIgnoreUnmapped.toJson(fieldPath = None))(equalTo(expectedWithIgnoreUnmapped.toJson)) && + assert(queryWithInnerHits.toJson(fieldPath = None))(equalTo(expectedWithInnerHits.toJson)) && + assert(queryWithInnerHitsEmpty.toJson(fieldPath = None))(equalTo(expectedWithInnerHitsEmpty.toJson)) && + assert(queryWithScoreMode.toJson(fieldPath = None))(equalTo(expectedWithScoreMode.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("prefix") { + val query = prefix(TestDocument.stringField, "test") + val queryWithCaseInsensitive = prefix(TestDocument.stringField, "test").caseInsensitiveTrue + + val expected = + """ + |{ + | "prefix": { + | "stringField": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "prefix": { + | "stringField": { + | "value": "test", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) + }, + test("range") { + val queryEmpty = range(TestDocument.intField) + val queryEmptyWithBoost = range(TestDocument.intField).boost(3.14) + val queryLowerBound = range(TestDocument.intField).gt(23) + val queryUpperBound = range(TestDocument.intField).lt(45) + val queryInclusiveLowerBound = range(TestDocument.intField).gte(23) + val queryInclusiveUpperBound = range(TestDocument.intField).lte(45) + val queryMixedBounds = range(TestDocument.intField).gt(10).lte(99) + val queryMixedBoundsWithBoost = range(TestDocument.intField).gt(10).lte(99).boost(3.14) + val queryWithFormat = range(TestDocument.dateField).gt(LocalDate.of(2020, 1, 10)).format("yyyy-MM-dd") + + val expectedEmpty = + """ + |{ + | "range": { + | "intField": { + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "range": { + | "intField": { + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedLowerBound = + """ + |{ + | "range": { + | "intField": { + | "gt": 23 + | } + | } + |} + |""".stripMargin + + val expectedUpperBound = + """ + |{ + | "range": { + | "intField": { + | "lt": 45 + | } + | } + |} + |""".stripMargin + + val expectedInclusiveLowerBound = + """ + |{ + | "range": { + | "intField": { + | "gte": 23 + | } + | } + |} + |""".stripMargin + + val expectedInclusiveUpperBound = + """ + |{ + | "range": { + | "intField": { + | "lte": 45 + | } + | } + |} + |""".stripMargin + + val expectedMixedBounds = + """ + |{ + | "range": { + | "intField": { + | "gt": 10, + | "lte": 99 + | } + | } + |} + |""".stripMargin + + val expectedMixedBoundsWithBoost = + """ + |{ + | "range": { + | "intField": { + | "gt": 10, + | "lte": 99, + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithFormat = + """ + |{ + | "range": { + | "dateField": { + | "gt": "2020-01-10", + | "format": "yyyy-MM-dd" + | } + | } + |} + |""".stripMargin + + assert(queryEmpty.toJson(fieldPath = None))(equalTo(expectedEmpty.toJson)) && + assert(queryEmptyWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryLowerBound.toJson(fieldPath = None))(equalTo(expectedLowerBound.toJson)) && + 
assert(queryUpperBound.toJson(fieldPath = None))(equalTo(expectedUpperBound.toJson)) && + assert(queryInclusiveLowerBound.toJson(fieldPath = None))(equalTo(expectedInclusiveLowerBound.toJson)) && + assert(queryInclusiveUpperBound.toJson(fieldPath = None))(equalTo(expectedInclusiveUpperBound.toJson)) && + assert(queryMixedBounds.toJson(fieldPath = None))(equalTo(expectedMixedBounds.toJson)) && + assert(queryMixedBoundsWithBoost.toJson(fieldPath = None))(equalTo(expectedMixedBoundsWithBoost.toJson)) && + assert(queryWithFormat.toJson(fieldPath = None))(equalTo(expectedWithFormat.toJson)) + }, + test("regexp") { + val query = regexp("stringField", "t.*st") + val queryTs = regexp(TestDocument.stringField, "t.*st") + val queryWithCaseInsensitive = regexp(TestDocument.stringField, "t.*st").caseInsensitiveTrue + val queryWithSuffix = regexp(TestDocument.stringField.raw, "t.*st") + + val expected = + """ + |{ + | "regexp": { + | "stringField": { + | "value": "t.*st" + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "regexp": { + | "stringField": { + | "value": "t.*st", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithSuffix = + """ + |{ + | "regexp": { + | "stringField.raw": { + | "value": "t.*st" + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryTs.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithSuffix.toJson(fieldPath = None))(equalTo(expectedWithSuffix.toJson)) + }, + test("script") { + val query = + ElasticQuery.script(Script("doc['day_of_week'].value > params['day']").params("day" -> 2).lang(Painless)) + val queryWithBoost = ElasticQuery.script(Script("doc['day_of_week'].value > 2")).boost(2.0) + + val expected = + """ + |{ + | "script": { + | "script": { + | "lang": "painless", + | "source": "doc['day_of_week'].value > params['day']", + | "params": { + | "day": 2 + | } + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "script": { + | "script": { + | "source": "doc['day_of_week'].value > 2" + | }, + | "boost": 2.0 + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("startsWith") { + val query = startsWith(TestDocument.stringField, "test") + val queryWithBoost = startsWith(TestDocument.stringField, "test").boost(3.14) + val queryWithCaseInsensitive = startsWith(TestDocument.stringField, "test").caseInsensitiveTrue + val queryWithAllParams = startsWith(TestDocument.stringField, "test").boost(39.2).caseInsensitiveFalse + + val expected = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*", + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "test*", + | "boost": 39.2, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + 
assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("term") { + val queryString = term(TestDocument.stringField, "test") + val queryBool = term(TestDocument.booleanField, true) + val queryInt = term(TestDocument.intField, 21) + val queryWithBoost = term(TestDocument.stringField, "test").boost(10.21) + val queryWithCaseInsensitive = term(TestDocument.stringField, "test").caseInsensitiveTrue + val queryWithAllParams = term(TestDocument.stringField, "test").boost(3.14).caseInsensitiveFalse + + val expectedString = + """ + |{ + | "term": { + | "stringField": { + | "value": "test" + | } + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "term": { + | "booleanField": { + | "value": true + | } + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "term": { + | "intField": { + | "value": 21 + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "term": { + | "stringField": { + | "value": "test", + | "boost": 10.21 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "term": { + | "stringField": { + | "value": "test", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "term": { + | "stringField": { + | "value": "test", + | "boost": 3.14, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + }, + test("terms") { + val queryString = terms(TestDocument.stringField, "a", "b", "c") + val queryBool = terms(TestDocument.booleanField, true, false) + val queryInt = terms(TestDocument.intField, 1, 2, 3, 4) + val queryWithBoost = terms(TestDocument.stringField, "a", "b", "c").boost(10.21) + + val expectedString = + """ + |{ + | "terms": { + | "stringField": [ "a", "b", "c" ] + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "terms": { + | "booleanField": [ true, false ] + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "terms": { + | "intField": [ 1, 2, 3, 4 ] + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "terms": { + | "stringField": [ "a", "b", "c" ], + | "boost": 10.21 + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("termsSet") { + val queryString = + termsSet(field = "stringField", minimumShouldMatchField = "required_matches", terms = "a", "b", "c") + val queryBool = + termsSet(field = "booleanField", minimumShouldMatchField = "required_matches", terms = true, false) + val queryInt = termsSet(field = "intField", 
minimumShouldMatchField = "required_matches", terms = 1, 2, 3) + val queryStringTs = termsSet( + field = TestDocument.stringField, + minimumShouldMatchField = TestDocument.stringField, + terms = "a", + "b", + "c" + ) + val queryBoolTs = termsSet( + field = TestDocument.booleanField, + minimumShouldMatchField = TestDocument.booleanField, + terms = true, + false + ) + val queryIntTs = + termsSet(field = TestDocument.intField, minimumShouldMatchField = TestDocument.intField, terms = 1, 2, 3) + val queryWithBoost = + termsSet(field = "intField", minimumShouldMatchField = "required_matches", terms = 1, 2, 3).boost(10.0) + + val expectedString = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_field": "required_matches" + | } + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_field": "required_matches" + | } + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3], + | "minimum_should_match_field": "required_matches" + | } + | } + |} + |""".stripMargin + + val expectedStringTs = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_field": "stringField" + | } + | } + |} + |""".stripMargin + + val expectedBoolTs = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_field": "booleanField" + | } + | } + |} + |""".stripMargin + + val expectedIntTs = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3 ], + | "minimum_should_match_field": "intField" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3 ], + | "minimum_should_match_field": "required_matches", + | "boost": 10.0 + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && + assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && + assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) + }, + test("termsSetScript") { + val queryString = termsSetScript( + field = "stringField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryBool = termsSetScript( + field = "booleanField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = true, + false + ) + val queryInt = termsSetScript( + field = "intField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3, + 4 + ) + val queryStringTs = termsSetScript( + field = TestDocument.stringField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = "a", + "b", + "c" + ) + val queryBoolTs = termsSetScript( + field = TestDocument.booleanField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = true, + false + ) + val queryIntTs = termsSetScript( + field = TestDocument.intField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3, + 4 + ) + val queryWithBoost = 
termsSetScript( + field = "intField", + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = 1, + 2, + 3, + 4 + ).boost(10.0) + + val expectedString = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedBool = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedInt = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3, 4 ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedStringTs = + """ + |{ + | "terms_set": { + | "stringField": { + | "terms": [ "a", "b", "c" ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedBoolTs = + """ + |{ + | "terms_set": { + | "booleanField": { + | "terms": [ true, false ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedIntTs = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3, 4 ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | } + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "terms_set": { + | "intField": { + | "terms": [ 1, 2, 3, 4 ], + | "minimum_should_match_script": { + | "source": "doc['intField'].value" + | }, + | "boost": 10.0 + | } + | } + |} + |""".stripMargin + + assert(queryString.toJson(fieldPath = None))(equalTo(expectedString.toJson)) && + assert(queryBool.toJson(fieldPath = None))(equalTo(expectedBool.toJson)) && + assert(queryInt.toJson(fieldPath = None))(equalTo(expectedInt.toJson)) && + assert(queryStringTs.toJson(fieldPath = None))(equalTo(expectedStringTs.toJson)) && + assert(queryBoolTs.toJson(fieldPath = None))(equalTo(expectedBoolTs.toJson)) && + assert(queryIntTs.toJson(fieldPath = None))(equalTo(expectedIntTs.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) + }, + test("wildcard") { + val query = wildcard(TestDocument.stringField, "[a-zA-Z]+") + val queryWithBoost = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(3.14) + val queryWithCaseInsensitive = wildcard(TestDocument.stringField, "[a-zA-Z]+").caseInsensitiveTrue + val queryWithAllParams = wildcard(TestDocument.stringField, "[a-zA-Z]+").boost(39.2).caseInsensitiveFalse + + val expected = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+" + | } + | } + |} + |""".stripMargin + + val expectedWithBoost = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+", + | "boost": 3.14 + | } + | } + |} + |""".stripMargin + + val expectedWithCaseInsensitive = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+", + | "case_insensitive": true + | } + | } + |} + |""".stripMargin + + val expectedWithAllParams = + """ + |{ + | "wildcard": { + | "stringField": { + | "value": "[a-zA-Z]+", + | "boost": 39.2, + | "case_insensitive": false + | } + | } + |} + |""".stripMargin + + assert(query.toJson(fieldPath = None))(equalTo(expected.toJson)) && + assert(queryWithBoost.toJson(fieldPath = None))(equalTo(expectedWithBoost.toJson)) && + assert(queryWithCaseInsensitive.toJson(fieldPath = 
None))(equalTo(expectedWithCaseInsensitive.toJson)) && + assert(queryWithAllParams.toJson(fieldPath = None))(equalTo(expectedWithAllParams.toJson)) + } + ) + ) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala index 5545b7868..fdd847540 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/HttpElasticExecutorSpec.scala @@ -1,298 +1,298 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package zio.elasticsearch - -import zio.Chunk -import zio.elasticsearch.ElasticAggregation.termsAggregation -import zio.elasticsearch.ElasticQuery.{kNN, matchAll, term} -import zio.elasticsearch.domain.TestDocument -import zio.elasticsearch.executor.Executor -import zio.elasticsearch.executor.response.{BulkResponse, CreateBulkResponse, Shards} -import zio.elasticsearch.request.CreationOutcome.Created -import zio.elasticsearch.request.DeletionOutcome.Deleted -import zio.elasticsearch.request.UpdateConflicts.Proceed -import zio.elasticsearch.request.UpdateOutcome -import zio.elasticsearch.result.{TermsAggregationBucketResult, TermsAggregationResult, UpdateByQueryResult} -import zio.elasticsearch.script.Script -import zio.test.Assertion._ -import zio.test.{Spec, TestEnvironment, TestResultZIOOps, assertZIO} - -object HttpElasticExecutorSpec extends SttpBackendStubSpec { - def spec: Spec[TestEnvironment, Any] = - suite("HttpElasticExecutor")( - test("aggregation") { - val executorAggregate = - Executor - .execute(ElasticRequest.aggregate(index, termsAggregation(name = "aggregation1", field = "name"))) - .aggregations - - val expectedTermsAggregationResult = - Map( - "aggregation1" -> TermsAggregationResult( - docErrorCount = 0, - sumOtherDocCount = 0, - buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) - ) - ) - - assertZIO(executorAggregate)(equalTo(expectedTermsAggregationResult)) - }, - test("bulk") { - val executorBulk = Executor.execute(ElasticRequest.bulk(ElasticRequest.create(index, doc)).refreshTrue) - - val expectedBulkResponse = - BulkResponse( - took = 3, - errors = false, - items = Chunk( - CreateBulkResponse( - index = "repositories", - id = "123", - version = Some(1), - result = Some("created"), - shards = Some(Shards(total = 1, successful = 1, failed = 0)), - status = Some(201), - error = None - ) - ) - ) - - assertZIO(executorBulk)(equalTo(expectedBulkResponse)) - }, - test("count") { - val executorCount = Executor.execute(ElasticRequest.count(index, matchAll).routing(Routing("routing"))) - - assertZIO(executorCount)(equalTo(2)) - }, - test("create") { - val executorCreate = - Executor - .execute( - ElasticRequest - .create[TestDocument](index = index, doc = doc) - .routing(Routing("routing")) - .refreshTrue - ) - - 
assertZIO(executorCreate)(equalTo(DocumentId("V4x8q4UB3agN0z75fv5r"))) - }, - test("create with ID") { - val executorCreateDocumentId = - Executor.execute( - ElasticRequest - .create[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorCreateDocumentId)(equalTo(Created)) - }, - test("createIndex") { - val executorCreateIndex = Executor.execute(ElasticRequest.createIndex(index = index)) - - val mapping = - """ - |{ - | "settings": { - | "index": { - | "number_of_shards": 1 - | } - | }, - | "mappings": { - | "_routing": { - | "required": true - | }, - | "properties": { - | "id": { - | "type": "keyword" - | } - | } - | } - |} - |""".stripMargin - val executorCreateIndexMapping = - Executor.execute(ElasticRequest.createIndex(index = index, definition = mapping)) - - assertZIO(executorCreateIndex)(equalTo(Created)) && - assertZIO(executorCreateIndexMapping)(equalTo(Created)) - }, - test("deleteById") { - val executorDeleteById = - Executor.execute( - ElasticRequest - .deleteById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorDeleteById)(equalTo(Deleted)) - }, - test("deleteByQuery") { - val executorDeleteByQuery = - Executor.execute( - ElasticRequest.deleteByQuery(index = index, query = matchAll).refreshTrue.routing(Routing("routing")) - ) - - assertZIO(executorDeleteByQuery)(equalTo(Deleted)) - }, - test("deleteIndex") { - val executorDeleteIndex = Executor.execute(ElasticRequest.deleteIndex(index = index)) - - assertZIO(executorDeleteIndex)(equalTo(Deleted)) - }, - test("exists") { - val executorExists = - Executor.execute( - ElasticRequest - .exists(index = index, id = DocumentId("example-id")) - .routing(Routing("routing")) - ) - - assertZIO(executorExists)(isTrue) - }, - test("getById") { - val executorGetById = - Executor - .execute( - ElasticRequest - .getById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) - .routing(Routing("routing")) - ) - .documentAs[TestDocument] - - assertZIO(executorGetById)(isSome(equalTo(doc))) - }, - test("knnSearch") { - val executorSearch = - Executor - .execute( - ElasticRequest - .knnSearch(selectors = index, query = kNN(TestDocument.vectorField, 2, 5, Chunk(-5.0, 9.0, -12.0))) - ) - .documentAs[TestDocument] - assertZIO(executorSearch)(equalTo(Chunk(doc))) - }, - test("refresh") { - val executorRefresh = Executor.execute(ElasticRequest.refresh(selectors = index)) - assertZIO(executorRefresh)(equalTo(true)) - }, - test("search") { - val executorSearch = - Executor - .execute(ElasticRequest.search(selectors = index, query = matchAll)) - .documentAs[TestDocument] - val terms = termsAggregation(name = "aggregation1", field = "name") - val executorSearchWithTerms = - Executor - .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) - .documentAs[TestDocument] - - assertZIO(executorSearch)(equalTo(Chunk(doc))) && assertZIO(executorSearchWithTerms)(equalTo(Chunk(doc))) - }, - test("search + aggregate") { - val terms = termsAggregation(name = "aggregation1", field = "name") - val executorSearchAggregations = - Executor - .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) - .aggregations - - val expectedTermsAggregationResult = - Map( - "aggregation1" -> TermsAggregationResult( - docErrorCount = 0, - sumOtherDocCount = 0, - buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", 
subAggregations = Map.empty)) - ) - ) - - assertZIO(executorSearchAggregations)(equalTo(expectedTermsAggregationResult)) - }, - test("update") { - val executorUpdate = - Executor.execute( - ElasticRequest - .update[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) - .orCreate(doc = secondDoc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorUpdate)(equalTo(UpdateOutcome.Updated)) - }, - test("updateAllByQuery") { - val executorUpdateAllByQuery = - Executor.execute( - ElasticRequest - .updateAllByQuery(index = index, script = Script("ctx._source['intField']++")) - .conflicts(Proceed) - .routing(Routing("routing")) - .refreshTrue - ) - - val expectedUpdateByQueryResult = - UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) - - assertZIO(executorUpdateAllByQuery)(equalTo(expectedUpdateByQueryResult)) - }, - test("updateByQuery") { - val executorUpdateByQuery = - Executor.execute( - ElasticRequest - .updateByQuery( - index = index, - query = term(field = TestDocument.stringField.keyword, value = "StringField"), - script = Script("ctx._source['intField']++") - ) - .conflicts(Proceed) - .routing(Routing("routing")) - .refreshTrue - ) - - val expectedUpdateByQueryResult = - UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) - - assertZIO(executorUpdateByQuery)(equalTo(expectedUpdateByQueryResult)) - }, - test("updateByScript") { - val executorUpdateByScript = - Executor.execute( - ElasticRequest - .updateByScript( - index = index, - id = DocumentId("V4x8q4UB3agN0z75fv5r"), - script = Script("ctx._source.intField += params['factor']").params("factor" -> 2) - ) - .orCreate(doc = secondDoc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorUpdateByScript)(equalTo(UpdateOutcome.Updated)) - }, - test("upsert") { - val executorUpsert = - Executor.execute( - ElasticRequest - .upsert[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) - .routing(Routing("routing")) - .refreshTrue - ) - - assertZIO(executorUpsert)(isUnit) - } - ).provideShared(elasticsearchSttpLayer) -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package zio.elasticsearch + +import zio.Chunk +import zio.elasticsearch.ElasticAggregation.termsAggregation +import zio.elasticsearch.ElasticQuery.{kNN, matchAll, term} +import zio.elasticsearch.domain.TestDocument +import zio.elasticsearch.executor.Executor +import zio.elasticsearch.executor.response.{BulkResponse, CreateBulkResponse, Shards} +import zio.elasticsearch.request.CreationOutcome.Created +import zio.elasticsearch.request.DeletionOutcome.Deleted +import zio.elasticsearch.request.UpdateConflicts.Proceed +import zio.elasticsearch.request.UpdateOutcome +import zio.elasticsearch.result.{TermsAggregationBucketResult, TermsAggregationResult, UpdateByQueryResult} +import zio.elasticsearch.script.Script +import zio.test.Assertion._ +import zio.test.{Spec, TestEnvironment, TestResultZIOOps, assertZIO} + +object HttpElasticExecutorSpec extends SttpBackendStubSpec { + def spec: Spec[TestEnvironment, Any] = + suite("HttpElasticExecutor")( + test("aggregation") { + val executorAggregate = + Executor + .execute(ElasticRequest.aggregate(index, termsAggregation(name = "aggregation1", field = "name"))) + .aggregations + + val expectedTermsAggregationResult = + Map( + "aggregation1" -> TermsAggregationResult( + docErrorCount = 0, + sumOtherDocCount = 0, + buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) + ) + ) + + assertZIO(executorAggregate)(equalTo(expectedTermsAggregationResult)) + }, + test("bulk") { + val executorBulk = Executor.execute(ElasticRequest.bulk(ElasticRequest.create(index, doc)).refreshTrue) + + val expectedBulkResponse = + BulkResponse( + took = 3, + errors = false, + items = Chunk( + CreateBulkResponse( + index = "repositories", + id = "123", + version = Some(1), + result = Some("created"), + shards = Some(Shards(total = 1, successful = 1, failed = 0)), + status = Some(201), + error = None + ) + ) + ) + + assertZIO(executorBulk)(equalTo(expectedBulkResponse)) + }, + test("count") { + val executorCount = Executor.execute(ElasticRequest.count(index, matchAll).routing(Routing("routing"))) + + assertZIO(executorCount)(equalTo(2)) + }, + test("create") { + val executorCreate = + Executor + .execute( + ElasticRequest + .create[TestDocument](index = index, doc = doc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorCreate)(equalTo(DocumentId("V4x8q4UB3agN0z75fv5r"))) + }, + test("create with ID") { + val executorCreateDocumentId = + Executor.execute( + ElasticRequest + .create[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorCreateDocumentId)(equalTo(Created)) + }, + test("createIndex") { + val executorCreateIndex = Executor.execute(ElasticRequest.createIndex(index = index)) + + val mapping = + """ + |{ + | "settings": { + | "index": { + | "number_of_shards": 1 + | } + | }, + | "mappings": { + | "_routing": { + | "required": true + | }, + | "properties": { + | "id": { + | "type": "keyword" + | } + | } + | } + |} + |""".stripMargin + val executorCreateIndexMapping = + Executor.execute(ElasticRequest.createIndex(index = index, definition = mapping)) + + assertZIO(executorCreateIndex)(equalTo(Created)) && + assertZIO(executorCreateIndexMapping)(equalTo(Created)) + }, + test("deleteById") { + val executorDeleteById = + Executor.execute( + ElasticRequest + .deleteById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) + .routing(Routing("routing")) + .refreshTrue + ) + + 
assertZIO(executorDeleteById)(equalTo(Deleted)) + }, + test("deleteByQuery") { + val executorDeleteByQuery = + Executor.execute( + ElasticRequest.deleteByQuery(index = index, query = matchAll).refreshTrue.routing(Routing("routing")) + ) + + assertZIO(executorDeleteByQuery)(equalTo(Deleted)) + }, + test("deleteIndex") { + val executorDeleteIndex = Executor.execute(ElasticRequest.deleteIndex(index = index)) + + assertZIO(executorDeleteIndex)(equalTo(Deleted)) + }, + test("exists") { + val executorExists = + Executor.execute( + ElasticRequest + .exists(index = index, id = DocumentId("example-id")) + .routing(Routing("routing")) + ) + + assertZIO(executorExists)(isTrue) + }, + test("getById") { + val executorGetById = + Executor + .execute( + ElasticRequest + .getById(index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r")) + .routing(Routing("routing")) + ) + .documentAs[TestDocument] + + assertZIO(executorGetById)(isSome(equalTo(doc))) + }, + test("knnSearch") { + val executorSearch = + Executor + .execute( + ElasticRequest + .knnSearch(selectors = index, query = kNN(TestDocument.vectorField, 2, 5, Chunk(-5.0, 9.0, -12.0))) + ) + .documentAs[TestDocument] + assertZIO(executorSearch)(equalTo(Chunk(doc))) + }, + test("refresh") { + val executorRefresh = Executor.execute(ElasticRequest.refresh(selectors = index)) + assertZIO(executorRefresh)(equalTo(true)) + }, + test("search") { + val executorSearch = + Executor + .execute(ElasticRequest.search(selectors = index, query = matchAll)) + .documentAs[TestDocument] + val terms = termsAggregation(name = "aggregation1", field = "name") + val executorSearchWithTerms = + Executor + .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) + .documentAs[TestDocument] + + assertZIO(executorSearch)(equalTo(Chunk(doc))) && assertZIO(executorSearchWithTerms)(equalTo(Chunk(doc))) + }, + test("search + aggregate") { + val terms = termsAggregation(name = "aggregation1", field = "name") + val executorSearchAggregations = + Executor + .execute(ElasticRequest.search(selectors = index, query = matchAll, aggregation = terms)) + .aggregations + + val expectedTermsAggregationResult = + Map( + "aggregation1" -> TermsAggregationResult( + docErrorCount = 0, + sumOtherDocCount = 0, + buckets = Chunk(TermsAggregationBucketResult(docCount = 5, key = "name", subAggregations = Map.empty)) + ) + ) + + assertZIO(executorSearchAggregations)(equalTo(expectedTermsAggregationResult)) + }, + test("update") { + val executorUpdate = + Executor.execute( + ElasticRequest + .update[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) + .orCreate(doc = secondDoc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorUpdate)(equalTo(UpdateOutcome.Updated)) + }, + test("updateAllByQuery") { + val executorUpdateAllByQuery = + Executor.execute( + ElasticRequest + .updateAllByQuery(index = index, script = Script("ctx._source['intField']++")) + .conflicts(Proceed) + .routing(Routing("routing")) + .refreshTrue + ) + + val expectedUpdateByQueryResult = + UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) + + assertZIO(executorUpdateAllByQuery)(equalTo(expectedUpdateByQueryResult)) + }, + test("updateByQuery") { + val executorUpdateByQuery = + Executor.execute( + ElasticRequest + .updateByQuery( + index = index, + query = term(field = TestDocument.stringField.keyword, value = "StringField"), + script = Script("ctx._source['intField']++") + ) + .conflicts(Proceed) + 
.routing(Routing("routing")) + .refreshTrue + ) + + val expectedUpdateByQueryResult = + UpdateByQueryResult(took = 1, total = 10, updated = 8, deleted = 0, versionConflicts = 2) + + assertZIO(executorUpdateByQuery)(equalTo(expectedUpdateByQueryResult)) + }, + test("updateByScript") { + val executorUpdateByScript = + Executor.execute( + ElasticRequest + .updateByScript( + index = index, + id = DocumentId("V4x8q4UB3agN0z75fv5r"), + script = Script("ctx._source.intField += params['factor']").params("factor" -> 2) + ) + .orCreate(doc = secondDoc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorUpdateByScript)(equalTo(UpdateOutcome.Updated)) + }, + test("upsert") { + val executorUpsert = + Executor.execute( + ElasticRequest + .upsert[TestDocument](index = index, id = DocumentId("V4x8q4UB3agN0z75fv5r"), doc = doc) + .routing(Routing("routing")) + .refreshTrue + ) + + assertZIO(executorUpsert)(isUnit) + } + ).provideShared(elasticsearchSttpLayer) +} diff --git a/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala index cdecc741f..895364f25 100644 --- a/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala +++ b/modules/library/src/test/scala/zio/elasticsearch/IndexNameSpec.scala @@ -6,7 +6,7 @@ * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. From 509f319eeb5c8d0548779ee026dd39a479bc43d1 Mon Sep 17 00:00:00 2001 From: Marko Krstic Date: Thu, 19 Jun 2025 14:16:59 +0200 Subject: [PATCH 11/15] Fix strange chars. --- .../zio/elasticsearch/HttpExecutorSpec.scala | 5874 ++++++++--------- 1 file changed, 2937 insertions(+), 2937 deletions(-) diff --git a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala index b62c25206..160aef368 100644 --- a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala +++ b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala @@ -1,2937 +1,2937 @@ -/* - * Copyright 2022 LambdaWorks - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package zio.elasticsearch - -import zio.Chunk -import zio.elasticsearch.ElasticAggregation._ -import zio.elasticsearch.ElasticHighlight.highlight -import zio.elasticsearch.ElasticQuery.{script => _, _} -import zio.elasticsearch.ElasticSort.sortBy -import zio.elasticsearch.aggregation.{AggregationOrder, IpRange} -import zio.elasticsearch.data.GeoPoint -import zio.elasticsearch.domain.{PartialTestDocument, TestDocument, TestSubDocument} -import zio.elasticsearch.executor.Executor -import zio.elasticsearch.query.DistanceUnit.Kilometers -import zio.elasticsearch.query.FunctionScoreFunction.randomScoreFunction -import zio.elasticsearch.query.MultiMatchType._ -import zio.elasticsearch.query.sort.SortMode.Max -import zio.elasticsearch.query.sort.SortOrder._ -import zio.elasticsearch.query.sort.SourceType.NumberType -import zio.elasticsearch.query.{Distance, FunctionScoreBoostMode, FunctionScoreFunction, InnerHits} -import zio.elasticsearch.request.{CreationOutcome, DeletionOutcome} -import zio.elasticsearch.result.{FilterAggregationResult, Item, MaxAggregationResult, UpdateByQueryResult} -import zio.elasticsearch.script.{Painless, Script} -import zio.json.ast.Json.{Arr, Str} -import zio.schema.codec.JsonCodec -import zio.stream.{Sink, ZSink} -import zio.test.Assertion._ -import zio.test.TestAspect._ -import zio.test._ - -import java.time.LocalDate -import scala.util.Random - -object HttpExecutorSpec extends IntegrationSpec { - - def spec: Spec[TestEnvironment, Any] = { - suite("Executor")( - suite("HTTP Executor")( - suite("aggregation")( - test("aggregate using avg aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(doubleField = 20)) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(doubleField = 10)) - .refreshTrue - ) - aggregation = avgAggregation(name = "aggregationDouble", field = TestDocument.doubleField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asAvgAggregation("aggregationDouble") - } yield assert(aggsRes.head.value)(equalTo(15.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using cardinality aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 10)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 20)) - .refreshTrue - ) - aggregation = cardinalityAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - - cardinalityAgg <- aggsRes.asCardinalityAggregation("aggregationInt") - } yield 
assert(cardinalityAgg.map(_.value))(isSome(equalTo(2))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using extended stats aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 100)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 50)) - .refreshTrue - ) - aggregation = extendedStatsAggregation(name = "aggregation", field = TestDocument.intField).sigma(3) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asExtendedStatsAggregation("aggregation") - } yield assert(aggsRes.head.count)(equalTo(2)) && - assert(aggsRes.head.min)(equalTo(50.0)) && - assert(aggsRes.head.max)(equalTo(100.0)) && - assert(aggsRes.head.avg)(equalTo(75.0)) && - assert(aggsRes.head.sum)(equalTo(150.0)) && - assert(aggsRes.head.sumOfSquares)(equalTo(12500.0)) && - assert(aggsRes.head.variance)(equalTo(625.0)) && - assert(aggsRes.head.variancePopulation)(equalTo(625.0)) && - assert(aggsRes.head.varianceSampling)(equalTo(1250.0)) && - assert(aggsRes.head.stdDeviation)(equalTo(25.0)) && - assert(aggsRes.head.stdDeviationPopulation)(equalTo(25.0)) && - assert(aggsRes.head.stdDeviationSampling)(equalTo(35.35533905932738)) && - assert(aggsRes.head.stdDeviationBoundsResult.upper)(equalTo(150.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.lower)(equalTo(0.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.upperPopulation)(equalTo(150.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.lowerPopulation)(equalTo(0.0)) && - assert(aggsRes.head.stdDeviationBoundsResult.upperSampling)(equalTo(181.06601717798213)) && - assert(aggsRes.head.stdDeviationBoundsResult.lowerSampling)(equalTo(-31.066017177982133)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using filter aggregation with max aggregation as a sub aggregation") { - val expectedResult = ( - "aggregation", - FilterAggregationResult( - docCount = 2, - subAggregations = Map( - "subAggregation" -> MaxAggregationResult(value = 5.0) - ) - ) - ) - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(stringField = "test", intField = 7) - secondDocumentUpdated = - secondDocument.copy(stringField = "filterAggregation", intField = 3) - thirdDocumentUpdated = - thirdDocument.copy(stringField = "filterAggregation", intField = 5) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument]( - firstSearchIndex, - firstDocumentId, - firstDocumentUpdated - ) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocumentUpdated - ) - ) - _ <- Executor.execute( - ElasticRequest - 
.upsert[TestDocument]( - firstSearchIndex, - thirdDocumentId, - thirdDocumentUpdated - ) - .refreshTrue - ) - query = term(field = TestDocument.stringField, value = secondDocumentUpdated.stringField.toLowerCase) - aggregation = - filterAggregation(name = "aggregation", query = query).withSubAgg( - maxAggregation("subAggregation", TestDocument.intField) - ) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - - } yield assert(aggsRes.head)(equalTo(expectedResult)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using max aggregation") { - val expectedResponse = ("aggregationInt", MaxAggregationResult(value = 20.0)) - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 20)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 10)) - .refreshTrue - ) - aggregation = maxAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - } yield assert(aggsRes.head)(equalTo(expectedResponse)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using min aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23)) - .refreshTrue - ) - aggregation = minAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asMinAggregation("aggregationInt") - } yield assert(aggsRes.head.value)(equalTo(23.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using percentile ranks aggregation") { - val expectedResult = Map("500.0" -> 55.55555555555555, "600.0" -> 100.0) - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 400)) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, 
secondDocument.copy(intField = 500)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 550)) - .refreshTrue - ) - aggregation = - percentileRanksAggregation(name = "aggregation", field = "intField", value = 500.0, values = 600.0) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asPercentileRanksAggregation("aggregation") - } yield assert(aggsRes.head.values)(equalTo(expectedResult)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asPercentilesAggregation("aggregationInt") - } yield assert(aggsRes.head.values.size)(equalTo(3)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation with multi index") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) - aggsRes <- Executor - .execute( - ElasticRequest.aggregate( - selectors = MultiIndex.names(firstSearchIndex, secondSearchIndex), - aggregation = aggregation - ) - ) - .asPercentilesAggregation("aggregationInt") - } yield assert(aggsRes.head.values.size)(equalTo(3)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation with index pattern") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - 
.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) - aggsRes <- Executor - .execute( - ElasticRequest.aggregate( - selectors = IndexPatternAll, - aggregation = aggregation - ) - ) - .asPercentilesAggregation("aggregationInt") - } yield assert(aggsRes.head.values.size)(equalTo(3)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("aggregate using percentiles aggregation as sub aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - termsAggregation(name = "first", field = TestDocument.stringField.keyword) - .withSubAgg(percentilesAggregation(name = "second", field = TestSubDocument.intField)) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using stats aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 7)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 6)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 10)) - .refreshTrue - ) - aggregation = statsAggregation(name = "aggregation", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asStatsAggregation("aggregation") - } yield assert(aggsRes.head.count)(equalTo(3)) && - assert(aggsRes.head.min)(equalTo(6.0)) && - assert(aggsRes.head.max)(equalTo(10.0)) && - assert(aggsRes.head.avg)(equalTo(7.666666666666667)) && - assert(aggsRes.head.sum)(equalTo(23.0)) - - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using sum aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, 
secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200)) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23)) - .refreshTrue - ) - aggregation = sumAggregation(name = "aggregationInt", field = TestDocument.intField) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asSumAggregation("aggregationInt") - } yield assert(aggsRes.head.value)(equalTo(223.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using ip range aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docIdA, docA, docIdB, docB) => - val updatedA = docA.copy(stringField = "192.168.1.10") - val updatedB = docB.copy(stringField = "192.168.1.200") - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, updatedA)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, docIdB, updatedB) - .refreshTrue - ) - - aggregation = IpRange( - name = "ip_range_agg", - field = "ipField", - ranges = Chunk( - IpRange.IpRangeBound(to = Some("192.168.1.100")), - IpRange.IpRangeBound( - from = Some("192.168.1.100"), - to = Some("192.168.1.255") - ) - ), - keyed = None, - subAggregations = Chunk.empty - ) - - result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation)) - agg <- result.aggregation("ip_range_agg") - } yield assertTrue(agg.nonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using ip range aggregation with CIDR masks") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docId1, doc1, docId2, doc2) => - val updated1 = doc1.copy(stringField = "10.0.0.10") - val updated2 = doc2.copy(stringField = "10.0.0.120") - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docId1, updated1)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, docId2, updated2) - .refreshTrue - ) - - aggregation = IpRange( - name = "cidr_agg", - field = "ipField", - ranges = Chunk( - IpRange.IpRangeBound(mask = Some("10.0.0.0/25")), - IpRange.IpRangeBound(mask = Some("10.0.0.128/25")) - ), - keyed = None, - subAggregations = Chunk.empty - ) - - result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation)) - agg <- result.aggregation("cidr_agg") - } yield assertTrue(agg.nonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using terms aggregation with max aggregation as a sub aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = - termsAggregation(name = "aggregationString", field = TestDocument.stringField.keyword).withSubAgg( - maxAggregation("subAggregation", TestDocument.intField) - ) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using missing aggregations") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = multipleAggregations.aggregations( - missingAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ), - missingAggregation(name = "aggregationString", field = "stringField.keyword") - ) - aggsRes <- Executor - .execute( - ElasticRequest - .aggregate(selectors = firstSearchIndex, aggregation = aggregation) - ) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using multiple terms aggregations") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - aggregation = multipleAggregations.aggregations( - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ), - termsAggregation(name = "aggregationInt", field = "intField.keyword") - ) - aggsRes <- Executor - .execute( - ElasticRequest - .aggregate(selectors = firstSearchIndex, aggregation = aggregation) - ) - .aggregations - } yield assert(aggsRes)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using terms aggregation with nested max aggregation and bucket sort aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) - ) 
- _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 2)) - .refreshTrue - ) - aggregation = - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).orderBy(AggregationOrder("aggregationInt", Desc)) - .withSubAgg(maxAggregation(name = "aggregationInt", field = "intField")) - .withSubAgg( - bucketSortAggregation("aggregationBucket").sort( - ElasticSort.sortBy("aggregationInt").order(Desc) - ) - ) - .size(1) - aggsRes <- Executor - .execute( - ElasticRequest - .aggregate(selectors = firstSearchIndex, aggregation = aggregation) - ) - agg <- aggsRes.asTermsAggregation("aggregationString") - } yield assert(agg.map(_.buckets.size))(isSome(equalTo(1))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using value count aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - firstDocumentId, - firstDocument.copy(stringField = "test") - ) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocument.copy(stringField = "test") - ) - .refreshTrue - ) - aggregation = valueCountAggregation(name = "aggregation", field = TestDocument.stringField.keyword) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asValueCountAggregation("aggregation") - - } yield assert(aggsRes.head.value)(equalTo(2)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("aggregate using weighted avg aggregation") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - firstDocumentId, - firstDocument.copy(doubleField = 5, intField = 2) - ) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocument.copy(doubleField = 10, intField = 3) - ) - .refreshTrue - ) - aggregation = weightedAvgAggregation( - name = "weightedAggregation", - valueField = TestDocument.doubleField, - weightField = TestDocument.intField - ) - aggsRes <- - Executor - .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) - .asWeightedAvgAggregation("weightedAggregation") - } yield assert(aggsRes.head.value)(equalTo(8.0)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ), - suite("search with aggregation")( - test("search for first result using match all query with multiple terms aggregations") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchAll - aggregation = termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withAgg(termsAggregation("aggregationInt", "intField")) - res <- Executor.execute( - ElasticRequest - .search( - selectors = firstSearchIndex, - query = query, - aggregation = aggregation - ) - .from(0) - .size(1) - ) - docs <- res.documentAs[TestDocument] - aggs <- res.aggregations - } yield assert(docs.length)(equalTo(1)) && assert(aggs)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test( - "search for first result using match all query with multiple terms aggregations and search after parameter" - ) { - checkOnce(genTestDocument) { firstDocument => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - reqs = (0 to 20).map { i => - ElasticRequest.create[TestDocument]( - firstSearchIndex, - firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = matchAll - aggregation = termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withAgg(termsAggregation("aggregationInt", "intField")) - res <- Executor - .execute( - ElasticRequest - .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - sa <- res.lastSortValue - res2 <- Executor - .execute( - ElasticRequest - .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) - .searchAfter(sa.get) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - docs <- res2.documentAs[TestDocument] - aggs <- res2.aggregations - } yield assert(docs.length)(equalTo(10)) && assert(aggs)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search using match all query with multiple terms aggregations with descending sort on one field") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - val firstDocumentWithFixedIntField = firstDocument.copy(intField = 25) - val secondDocumentWithFixedIntField = secondDocument.copy(intField = 32) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentWithFixedIntField) - .refreshTrue - ) - query = matchAll - aggregation = - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withAgg(termsAggregation("aggregationInt", "intField.keyword")) - res <- Executor.execute( - ElasticRequest - .search( - selectors = firstSearchIndex, - query = query, - aggregation = aggregation - ) - 
.sort(sortBy(field = TestDocument.intField).order(Desc)) - ) - docs <- res.documentAs[TestDocument] - aggs <- res.aggregations - } yield assert(docs)(equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField))) && - assert(aggs)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test( - "search using match all query with terms aggregations, nested max aggregation and nested bucketSelector aggregation" - ) { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 100)) - .refreshTrue - ) - query = matchAll - aggregation = - termsAggregation( - name = "aggregationString", - field = TestDocument.stringField.keyword - ).withSubAgg(maxAggregation(name = "aggregationInt", field = TestDocument.intField)) - .withSubAgg( - bucketSelectorAggregation( - name = "aggregationSelector", - script = Script("params.aggregation_int > 10"), - bucketsPath = Map("aggregation_int" -> "aggregationInt") - ) - ) - res <- Executor.execute( - ElasticRequest - .search( - selectors = firstSearchIndex, - query = query, - aggregation = aggregation - ) - ) - docs <- res.documentAs[TestDocument] - termsAgg <- res.asTermsAggregation("aggregationString") - } yield assert(docs)(isNonEmpty) && assert( - termsAgg.map(_.buckets.size) - )(isSome(equalTo(1))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("counting documents")( - test("successfully count documents with given query") { - checkOnce(genTestDocument) { document => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstCountIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](firstCountIndex, document).refreshTrue - ) - res <- Executor.execute(ElasticRequest.count(firstCountIndex, matchAll)) - } yield assert(res)(equalTo(1)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstCountIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstCountIndex)).orDie - ), - test("successfully count documents without given query") { - checkOnce(genTestDocument) { document => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondCountIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](secondCountIndex, document).refreshTrue - ) - res <- Executor.execute(ElasticRequest.count(secondCountIndex)) - } yield assert(res)(equalTo(1)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondCountIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondCountIndex)).orDie - ) - ) @@ shrinks(0), - suite("creating document")( - test("successfully create document") { - checkOnce(genTestDocument) { document => - for { - docId <- Executor.execute(ElasticRequest.create[TestDocument](index, document)) - res <- Executor.execute(ElasticRequest.getById(index, docId)).documentAs[TestDocument] - } yield assert(res)(isSome(equalTo(document))) - } - }, - 
test("successfully create document with ID given") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - assertZIO(Executor.execute(ElasticRequest.create[TestDocument](index, documentId, document)))( - equalTo(CreationOutcome.Created) - ) - } - }, - test("return 'AlreadyExists' if document with given ID already exists") { - checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) - res <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, secondDocument)) - } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) - } - } - ), - suite("creating index")( - test("successfully create index") { - assertZIO(Executor.execute(ElasticRequest.createIndex(createIndexTestName)))( - equalTo(CreationOutcome.Created) - ) - }, - test("return 'AlreadyExists' if index already exists") { - for { - _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) - res <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) - } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) - } - ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), - suite("creating or updating document")( - test("successfully create document") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(document))) - } - }, - test("successfully update document") { - checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, firstDocument)) - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, secondDocument)) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(secondDocument))) - } - } - ), - suite("deleting document by ID")( - test("successfully delete existing document") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.deleteById(index, documentId)) - } yield assert(res)(equalTo(DeletionOutcome.Deleted)) - } - }, - test("return 'NotFound' if the document does not exist") { - checkOnce(genDocumentId) { documentId => - assertZIO(Executor.execute(ElasticRequest.deleteById(index, documentId)))( - equalTo(DeletionOutcome.NotFound) - ) - } - } - ), - suite("delete index")( - test("successfully delete existing index") { - checkOnce(genIndexName) { name => - for { - _ <- Executor.execute(ElasticRequest.createIndex(name)) - res <- Executor.execute(ElasticRequest.deleteIndex(name)) - } yield assert(res)(equalTo(DeletionOutcome.Deleted)) - } - }, - test("return 'NotFound' if index does not exists") { - checkOnce(genIndexName) { name => - assertZIO(Executor.execute(ElasticRequest.deleteIndex(name)))(equalTo(DeletionOutcome.NotFound)) - } - } - ), - suite("finding document")( - test("return true if the document exists") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- 
Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.exists(index, documentId)) - } yield assert(res)(isTrue) - } - }, - test("return false if the document does not exist") { - checkOnce(genDocumentId) { documentId => - assertZIO(Executor.execute(ElasticRequest.exists(index, documentId)))(isFalse) - } - } - ), - suite("retrieving document by ID")( - test("successfully return document") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(res)(isSome(equalTo(document))) - } - }, - test("return None if the document does not exist") { - checkOnce(genDocumentId) { documentId => - assertZIO(Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument])(isNone) - } - }, - test("fail with throwable if decoding fails") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val result = for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestSubDocument] - } yield res - - assertZIO(result.exit)( - fails(isSubtype[Exception](assertException("Could not parse the document: .nestedField(missing)"))) - ) - } - } - ), - suite("refresh index")( - test("successfully refresh existing index") { - assertZIO(Executor.execute(ElasticRequest.refresh(index)))(isTrue) - }, - test("successfully refresh multiple existing indices") { - for { - _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) - res <- Executor.execute(ElasticRequest.refresh(MultiIndex.names(index, createIndexTestName))) - } yield assert(res)(isTrue) - }, - test("successfully refresh all indices") { - assertZIO(Executor.execute(ElasticRequest.refresh(IndexPatternAll)))(isTrue) - }, - test("return false if index does not exist") { - assertZIO(Executor.execute(ElasticRequest.refresh(refreshFailIndex)))(isFalse) - } - ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), - suite("retrieving document by IDs")( - test("find documents by ids") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ids(firstDocumentId.toString, secondDocumentId.toString) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query) - ) - items <- res.items - } yield assert(items != null)(isTrue) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ), - suite("kNN search")( - test("search for top two results") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, 
matchAll)) - firstDocumentUpdated = firstDocument.copy(vectorField = List(1, 5, -20)) - secondDocumentUpdated = secondDocument.copy(vectorField = List(42, 8, -15)) - thirdDocumentUpdated = thirdDocument.copy(vectorField = List(15, 11, 23)) - req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated) - req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated) - req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) - _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue) - query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0)) - res <- Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(equalTo(Chunk(firstDocumentUpdated, thirdDocumentUpdated))) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for top two results with filters") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(intField = 15, vectorField = List(1, 5, -20)) - secondDocumentUpdated = secondDocument.copy(intField = 21, vectorField = List(42, 8, -15)) - thirdDocumentUpdated = thirdDocument.copy(intField = 4, vectorField = List(15, 11, 23)) - req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated) - req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated) - req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) - _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue) - query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0)) - filter = ElasticQuery.range(TestDocument.intField).gt(10) - res <- Executor - .execute(ElasticRequest.knnSearch(firstSearchIndex, query).filter(filter)) - .documentAs[TestDocument] - } yield (assert(res)(equalTo(Chunk(firstDocumentUpdated, secondDocumentUpdated)))) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents")( - test("search for a document using a boosting query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"this is a ${firstDocument.stringField} test", intField = 7) - secondDocumentUpdated = - secondDocument.copy( - stringField = s"this is another ${secondDocument.stringField} test", - intField = 5 - ) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) - ) - _ <- 
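// A sketch of the kNN call the suite above exercises: the top k = 2 hits drawn from
// numCandidates = 3, against a "dense_vector" mapping (assumes this spec's imports):
def knnTopTwo = {
  val query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0))
  Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, query)).documentAs[TestDocument]
}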
Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - .refreshTrue - ) - query = boosting( - negativeBoost = 0.1f, - negativeQuery = - term(field = TestDocument.stringField, value = firstDocument.stringField.toLowerCase), - positiveQuery = matchPhrase( - field = TestDocument.stringField, - value = "test" - ) - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated)))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a constant score query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = "this is a test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = constantScore( - matchPhrase( - field = TestDocument.stringField, - value = "test" - ) - ).boost(2.1) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for first 2 documents using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query).from(0).size(2)) - .documentAs[TestDocument] - } yield assert(res.length)(equalTo(2)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for first 2 documents using range query with date format") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - val firstDocumentUpdated = firstDocument.copy(dateField = LocalDate.now.minusDays(2)) - val secondDocumentUpdated = secondDocument.copy(dateField = LocalDate.now) - val thirdDocumentUpdated = thirdDocument.copy(dateField = LocalDate.now.plusDays(2)) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, 
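// A sketch of the boosting-query shape used above: documents matching the negative
// side are not dropped, only down-weighted by negativeBoost ("obsolete" is a
// placeholder value, not taken from the tests):
val demoteObsolete =
  boosting(
    negativeBoost = 0.1f,
    negativeQuery = term(field = TestDocument.stringField, value = "obsolete"),
    positiveQuery = matchPhrase(field = TestDocument.stringField, value = "test")
  )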
matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) - .refreshTrue - ) - query = range(TestDocument.dateField).gte(LocalDate.now).format("yyyy-MM-dd").boost(1.0) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, thirdDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for documents with source filtering") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query).includes[PartialTestDocument]) - items <- res.items - } yield assert(items.map(item => Right(item.raw)))( - hasSameElements( - List(firstDocument, secondDocument, thirdDocument).map(document => - TestDocument.schema.migrate(PartialTestDocument.schema).flatMap(_(document)).flatMap { - partialDocument => - JsonCodec.jsonEncoder(PartialTestDocument.schema).toJsonAST(partialDocument) - } - ) - ) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("fail if an excluded source field is attempted to be decoded") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - val result = - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - _ <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query).excludes("intField")) - .documentAs[TestDocument] - } yield () - - assertZIO(result.exit)( - fails( - isSubtype[Exception]( - assertException("Could not parse all documents successfully: .intField(missing)") - ) - ) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("fail if any of 
the results cannot be decoded") { - checkOnce(genDocumentId, genDocumentId, genTestDocument, genTestSubDocument) { - (documentId, subDocumentId, document, subDocument) => - val result = - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](secondSearchIndex, documentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestSubDocument](secondSearchIndex, subDocumentId, subDocument) - .refreshTrue - ) - query = range(TestDocument.intField).gte(0) - res <- Executor.execute(ElasticRequest.search(secondSearchIndex, query)).documentAs[TestDocument] - } yield res - - assertZIO(result.exit)( - fails( - isSubtype[Exception]( - assertException("Could not parse all documents successfully: .dateField(missing)") - ) - ) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for a document which contains a specific prefix using a prefix query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.prefix( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.take(3) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a disjunction max query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"This is a ${firstDocument.stringField} test.") - secondDocumentUpdated = - secondDocument.copy(stringField = - s"This is a ${secondDocument.stringField} test. 
It should be in the list before ${firstDocument.stringField}, because it has higher relevance score than ${firstDocument.stringField}" - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) - ) - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - .refreshTrue - ) - query = disjunctionMax( - term( - field = TestDocument.stringField, - value = firstDocument.stringField.toLowerCase - ), - matchPhrase( - field = TestDocument.stringField, - value = secondDocument.stringField - ) - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a fuzzy query") { - checkOnce(genDocumentId, genTestDocument) { (firstDocumentId, firstDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument).refreshTrue - ) - query = ElasticQuery.fuzzy( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.substring(1) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield { - assert(res)(Assertion.contains(firstDocument)) - } - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document which contains a term using a wildcard query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.contains( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.take(3) - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document which starts with a term using a wildcard query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.startsWith( - field = TestDocument.stringField.keyword, - value = firstDocument.stringField.take(3) - ) - res <- Executor - 
.execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document which conforms to a pattern using a wildcard query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = wildcard( - field = TestDocument.stringField.keyword, - value = s"${firstDocument.stringField.take(2)}*${firstDocument.stringField.takeRight(2)}" - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a match all query with index pattern") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - firstDocumentCopy = firstDocument.copy(stringField = "this is test") - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) - .refreshTrue - ) - secondDocumentCopy = secondDocument.copy(stringField = "this is test") - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) - .refreshTrue - ) - query = matchAll - res <- Executor - .execute(ElasticRequest.search(IndexPattern("search-index*"), query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( - Assertion.contains(secondDocumentCopy) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for a document using a match boolean prefix query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = "test this is boolean") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchBooleanPrefix(TestDocument.stringField, "this is test bo") - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, 
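// The contains/startsWith helpers and wildcard itself, as the test names above say,
// all run wildcard queries; a sketch with placeholder values (assumes this spec's imports):
val bySubstring = ElasticQuery.contains(field = TestDocument.stringField.keyword, value = "abc")   // matches *abc*
val byPrefix    = ElasticQuery.startsWith(field = TestDocument.stringField.keyword, value = "abc") // matches abc*
val byPattern   = wildcard(field = TestDocument.stringField.keyword, value = "ab*yz")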
query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a match phrase query with multi index") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - firstDocumentCopy = firstDocument.copy(stringField = "this is test") - _ <- - Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) - .refreshTrue - ) - secondDocumentCopy = secondDocument.copy(stringField = "this is test") - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) - .refreshTrue - ) - query = matchPhrase( - field = TestDocument.stringField, - value = firstDocumentCopy.stringField - ) - - res <- Executor - .execute(ElasticRequest.search(MultiIndex.names(firstSearchIndex, secondSearchIndex), query)) - .documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( - Assertion.contains(secondDocumentCopy) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for a document using a match phrase query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchPhrase( - field = TestDocument.stringField, - value = firstDocument.stringField - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(Assertion.contains(document)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a match phrase prefix query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = s"${firstDocument.stringField} test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = matchPhrasePrefix( - field = 
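// A sketch of the index addressing seen in these tests: a single index, an explicit
// list, or a pattern can all be searched with the same query (assumes this spec's imports):
val oneIndex   = ElasticRequest.search(firstSearchIndex, matchAll)
val listedPair = ElasticRequest.search(MultiIndex.names(firstSearchIndex, secondSearchIndex), matchAll)
val patterned  = ElasticRequest.search(IndexPattern("search-index*"), matchAll)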
TestDocument.stringField, - value = s"${firstDocument.stringField} te" - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a multi match query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - document = firstDocument.copy(stringField = "test") - _ <- - Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - - query = - multiMatch(value = "test").fields(TestDocument.stringField).matchingType(BestFields) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a terms query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") - secondDocumentUpdated = - secondDocument.copy(stringField = s"this is ${secondDocument.stringField} another test") - _ <- - Executor.execute( - ElasticRequest - .bulk( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) - ) - .refreshTrue - ) - query = terms( - field = TestDocument.stringField, - values = firstDocument.stringField.toLowerCase, - secondDocument.stringField.toLowerCase - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(List(firstDocumentUpdated, secondDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a terms set query with minimumShouldMatchField") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) - secondDocumentUpdated = - secondDocument.copy( - stringField = - s"this is ${secondDocument.stringField} another test, not ${firstDocument.stringField}", - 
intField = 2 - ) - _ <- - Executor.execute( - ElasticRequest - .bulk( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - ) - .refreshTrue - ) - query = termsSet( - field = "stringField", - minimumShouldMatchField = "intField", - terms = secondDocument.stringField.toLowerCase, - firstDocument.stringField.toLowerCase - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a terms set query with minimumShouldMatchScript") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - firstDocumentUpdated = - firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) - secondDocumentUpdated = - secondDocument.copy( - stringField = s"this is ${secondDocument.stringField} test, not ${firstDocument.stringField}", - intField = 2 - ) - _ <- - Executor.execute( - ElasticRequest - .bulk( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) - ) - .refreshTrue - ) - query = termsSetScript( - field = TestDocument.stringField, - minimumShouldMatchScript = Script("doc['intField'].value"), - terms = firstDocument.stringField.toLowerCase, - secondDocument.stringField.toLowerCase - ) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using nested query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = - nested(path = TestDocument.subDocumentList, query = matchAll) - res <- - Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(List(firstDocument, secondDocument))) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using should with satisfying minimumShouldMatch condition") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- 
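// A sketch contrasting the two terms-set variants exercised above: the number of terms
// that must match comes either from a numeric field or from a script ("a"/"b" are
// placeholder terms):
val byField = termsSet(field = "stringField", minimumShouldMatchField = "intField", terms = "a", "b")
val byScript = termsSetScript(
  field = TestDocument.stringField,
  minimumShouldMatchScript = Script("doc['intField'].value"),
  terms = "a", "b"
)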
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should( - matches(TestDocument.stringField, firstDocument.stringField), - matches(TestDocument.intField, firstDocument.intField), - matches(TestDocument.doubleField, firstDocument.doubleField + 1) - ).minimumShouldMatch(2) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using script query") { - checkN(4)(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = ElasticQuery.script(Script("doc['booleanField'].value == true")) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(hasSameElements(List(firstDocument, secondDocument).filter(_.booleanField == true))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document that doesn't exist using a regexp query without case insensitivity") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = - ElasticQuery.regexp( - field = TestDocument.stringField, - value = - s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}".toUpperCase - ) - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield assert(res)(!Assertion.contains(firstDocument)) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using a regexp query with case insensitivity") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = 
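// A sketch of the regexp variants these two tests exercise: the same pattern misses
// once upper-cased unless case insensitivity is switched on ("A.*Z" is a placeholder
// pattern, not taken from the tests):
val caseSensitive   = ElasticQuery.regexp(field = TestDocument.stringField, value = "A.*Z")
val caseInsensitive = ElasticQuery.regexp(field = TestDocument.stringField, value = "A.*Z").caseInsensitiveTrue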
ElasticQuery - .regexp( - field = TestDocument.stringField, - value = s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}" - ) - .caseInsensitiveTrue - res <- Executor - .execute(ElasticRequest.search(firstSearchIndex, query)) - .documentAs[TestDocument] - } yield (assert(res)(Assertion.contains(firstDocument)) && assert(res)( - !Assertion.contains(secondDocument) - )) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for a document using should with unsatisfying minimumShouldMatch condition") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should( - matches(TestDocument.stringField, firstDocument.stringField), - matches(TestDocument.intField, firstDocument.intField + 1), - matches(TestDocument.doubleField, firstDocument.doubleField + 1) - ).minimumShouldMatch(2) - res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] - } yield assert(res)(isEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents with inner hits")( - test("search for a document using nested query with inner hits") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = - nested(path = TestDocument.subDocumentList, query = matchAll).innerHits - result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) - items <- result.items - res = - items.map(_.innerHitAs[TestSubDocument]("subDocumentList")).collect { case Right(value) => value } - } yield assert(res)( - hasSameElements(List(firstDocument.subDocumentList, secondDocument.subDocumentList)) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents with highlights")( - test("successfully find document with highlight") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, 
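// A sketch of the inner-hits flow the next suite exercises: request inner hits on a
// nested query, then decode each one (assumes this spec's imports):
def nestedInnerHits =
  for {
    result <- Executor.execute(
                ElasticRequest.search(firstSearchIndex, nested(path = TestDocument.subDocumentList, query = matchAll).innerHits)
              )
    items  <- result.items
  } yield items.map(_.innerHitAs[TestSubDocument]("subDocumentList")).collect { case Right(sub) => sub }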
secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) - ) - items <- res.items - } yield assert(items.map(_.highlight("stringField")))( - hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find inner hit document with highlight") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- - Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = nested( - path = TestDocument.subDocumentList, - query = must( - matches( - TestSubDocument.stringField, - secondDocument.subDocumentList.headOption.map(_.stringField).getOrElse("foo") - ) - ) - ).innerHits( - InnerHits().highlights(highlight(TestSubDocument.stringField)) - ) - result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) - items <- result.items - res = items - .flatMap(_.innerHit("subDocumentList")) - .flatten - .flatMap(_.highlight("subDocumentList.stringField")) - .flatten - } yield assert(res)( - Assertion.contains( - secondDocument.subDocumentList.headOption - .map(doc => s"<em>${doc.stringField}</em>") - .getOrElse("foo") - ) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlight using field accessor") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query).highlights(highlight(TestDocument.stringField)) - ) - items <- res.items - } yield assert(items.map(_.highlight(TestDocument.stringField)))( - hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlights and return highlights map successfully") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) - ) - items <- res.items - } yield assert(items.map(_.highlights))( - hasSameElements(List(Some(Map("stringField" -> Chunk(s"<em>${firstDocument.stringField}</em>"))))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlight while using global config") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest - .search(firstSearchIndex, query) - .highlights( - highlight(TestDocument.stringField) - .withGlobalConfig("pre_tags", Arr(Str("<ul>"))) - .withGlobalConfig("post_tags", Arr(Str("</ul>"))) - ) - ) - items <- res.items - } yield assert(items.map(_.highlight(TestDocument.stringField)))( - hasSameElements(List(Some(Chunk(s"<ul>${firstDocument.stringField}</ul>")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("successfully find document with highlight while using local config to overwrite global config") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = should(matches("stringField", firstDocument.stringField)) - res <- - Executor.execute( - ElasticRequest - .search(firstSearchIndex, query) - .highlights( - highlight( - TestDocument.stringField, - config = Map("pre_tags" -> Arr(Str("<em>")), "post_tags" -> Arr(Str("</em>"))) - ) - .withGlobalConfig("pre_tags", Arr(Str("<ul>"))) - .withGlobalConfig("post_tags", Arr(Str("</ul>"))) - ) - ) - items <- res.items - } yield assert(items.map(_.highlight(TestDocument.stringField)))( - hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>
")))) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ), - suite("searching for sorted documents")( - test("search for document sorted by descending age and by ascending birthDate using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - val firstDocumentWithFixedIntField = - firstDocument.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) - val secondDocumentWithFixedIntField = - secondDocument.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocumentWithFixedIntField - ) - .refreshTrue - ) - query = range(TestDocument.intField).gte(20) - res <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .sort( - sortBy(TestDocument.intField).order(Desc), - sortBy(TestDocument.dateField).order(Asc).format("strict_date_optional_time_nanos") - ) - ) - .documentAs[TestDocument] - } yield assert(res)( - equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for document sorted by script where age is ascending using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstEmployee, secondDocumentId, secondEmployee) => - val firstDocumentWithFixedIntField = - firstEmployee.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) - val secondDocumentWithFixedIntField = - secondEmployee.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - firstSearchIndex, - secondDocumentId, - secondDocumentWithFixedIntField - ) - .refreshTrue - ) - query = range(TestDocument.intField).gte(20) - res <- - Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .sort(sortBy(Script("doc['intField'].value").lang(Painless), NumberType).order(Asc)) - ) - .documentAs[TestDocument] - } yield assert(res)( - equalTo(Chunk(firstDocumentWithFixedIntField, secondDocumentWithFixedIntField)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for document sorted descending with 'max' mode by one field using matchAll query") { - checkOnce(genDocumentId, genTestSubDocument, genDocumentId, genTestSubDocument) { - (firstDocumentId, firstSubDocument, secondDocumentId, secondSubDocument) => - val firstSubDocumentWithFixedIntList = firstSubDocument.copy(intFieldList = List(11, 4, 37)) - val secondSubDocumentWithFixedIntList = secondSubDocument.copy(intFieldList = List(30, 29, 35)) - for { - _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .upsert[TestSubDocument](firstSearchIndex, firstDocumentId, firstSubDocumentWithFixedIntList) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestSubDocument]( - firstSearchIndex, - secondDocumentId, - secondSubDocumentWithFixedIntList - ) - .refreshTrue - ) - query = matchAll - res <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .sort(sortBy(TestSubDocument.intFieldList).mode(Max).order(Desc)) - ) - .documentAs[TestSubDocument] - } yield assert(res)( - equalTo(Chunk(firstSubDocumentWithFixedIntList, secondSubDocumentWithFixedIntList)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents using scroll API and returning them as a stream")( - test("search for documents using range query") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => - val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) - } yield assert(res)(isNonEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("search for documents using range query with multiple pages") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 203).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .stream( - ElasticRequest.search(secondSearchIndex, query) - ) - .run(sink) - } yield assert(res)(hasSize(equalTo(204))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for documents using range query with multiple pages and return type") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = - ZSink.collectAll[TestDocument] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .streamAs[TestDocument](ElasticRequest.search(secondSearchIndex, query)) - 
.run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("search for documents using range query - empty stream") { - val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) - } yield assert(res)(hasSize(equalTo(0))) - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents using PIT (point in time) and returning them as a stream")( - test("successfully create PIT and return stream results") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = - ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) - .run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test( - "successfully create PIT and return stream results with changed page size and different keep alive parameters" - ) { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = - ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .stream( - ElasticRequest.search(secondSearchIndex, query), - StreamConfig.SearchAfter.withPageSize(40).keepAliveFor("2m") - ) - .run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("successfully create PIT(point in time) and return stream results as specific type") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = - ZSink.collectAll[TestDocument] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - reqs = (0 to 200).map { _ => - ElasticRequest.create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.doubleField).gte(100.0) - res <- Executor - .streamAs[TestDocument]( - 
ElasticRequest.search(secondSearchIndex, query), - StreamConfig.SearchAfter - ) - .run(sink) - } yield assert(res)(hasSize(equalTo(201))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ), - test("successfully create point in time and return empty stream if there are no valid results") { - checkOnce(genTestDocument) { document => - def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = - ZSink.collectAll[Item] - - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest - .create[TestDocument]( - secondSearchIndex, - document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) - ) - .refreshTrue - ) - query = range(TestDocument.doubleField).gte(200.0) - res <- Executor - .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) - .run(sink) - } yield assert(res)(isEmpty) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie - ) - ) @@ shrinks(0), - suite("searching for documents using SearchAfter Query")( - test("search for document sorted by ascending age while using search after query") { - checkOnce(genTestDocument) { firstDocument => - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - reqs = (0 to 100).map { i => - ElasticRequest.create[TestDocument]( - firstSearchIndex, - firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) - ) - } - _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) - query = range(TestDocument.intField).gte(10) - res <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - sa <- res.lastSortValue - res2 <- Executor - .execute( - ElasticRequest - .search(firstSearchIndex, query) - .searchAfter(sa.get) - .size(10) - .sort( - sortBy(TestDocument.intField).order(Asc) - ) - ) - .documentAs[TestDocument] - } yield assert(res2.map(_.intField))( - equalTo(Chunk.fromIterable(20 to 29)) - ) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) @@ shrinks(0) - ), - suite("deleting by query")( - test("successfully delete all matched documents") { - checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { - (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => - for { - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - deleteByQueryIndex, - firstDocumentId, - firstDocument.copy(doubleField = 150) - ) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - deleteByQueryIndex, - secondDocumentId, - secondDocument.copy(doubleField = 350) - ) - ) - _ <- Executor.execute( - ElasticRequest - .upsert[TestDocument]( - deleteByQueryIndex, - thirdDocumentId, - thirdDocument.copy(doubleField = 400) - ) - .refreshTrue - ) - deleteQuery = range(TestDocument.doubleField).gte(300.0) - _ <- Executor - .execute(ElasticRequest.deleteByQuery(deleteByQueryIndex, deleteQuery).refreshTrue) - res <- Executor - .execute(ElasticRequest.search(deleteByQueryIndex, matchAll)) - .documentAs[TestDocument] - } yield 
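// A sketch of the search_after pagination above: the sort values of the last hit on
// one page anchor the next page (assumes this spec's imports; the unsafe .get mirrors
// the test, and reusing `base` assumes request builders are immutable):
def nextPage(query: ElasticQuery[_]) = {
  val base = ElasticRequest.search(firstSearchIndex, query).size(10).sort(sortBy(TestDocument.intField).order(Asc))
  for {
    first <- Executor.execute(base)
    sa    <- first.lastSortValue
    next  <- Executor.execute(base.searchAfter(sa.get)).documentAs[TestDocument]
  } yield next
}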
assert(res)(hasSameElements(List(firstDocument.copy(doubleField = 150)))) - } - } @@ around( - Executor.execute(ElasticRequest.createIndex(deleteByQueryIndex)), - Executor.execute(ElasticRequest.deleteIndex(deleteByQueryIndex)).orDie - ), - test("returns NotFound when provided index is missing") { - checkOnce(genIndexName) { missingIndex => - assertZIO(Executor.execute(ElasticRequest.deleteByQuery(missingIndex, matchAll)))( - equalTo(DeletionOutcome.NotFound) - ) - } - } - ), - suite("bulk query")( - test("successfully execute bulk query") { - checkOnce(genDocumentId, genDocumentId, genDocumentId, genTestDocument) { - (firstDocumentId, secondDocumentId, thirdDocumentId, document) => - for { - _ <- Executor.execute( - ElasticRequest - .create[TestDocument](index, firstDocumentId, document.copy(stringField = "randomIdString")) - ) - _ <- - Executor.execute( - ElasticRequest - .create[TestDocument](index, secondDocumentId, document.copy(stringField = "randomIdString2")) - .refreshTrue - ) - req1 = ElasticRequest.create[TestDocument](index, thirdDocumentId, document) - req2 = ElasticRequest.create[TestDocument](index, document.copy(stringField = "randomIdString3")) - req3 = ElasticRequest.upsert[TestDocument](index, firstDocumentId, document.copy(doubleField = 3000)) - req4 = ElasticRequest.deleteById(index, secondDocumentId) - req5 = ElasticRequest.update[TestDocument](index, thirdDocumentId, document.copy(intField = 100)) - req6 = ElasticRequest.updateByScript( - index, - firstDocumentId, - Script("ctx._source.intField = params['factor']").params("factor" -> 100) - ) - req7 = - ElasticRequest - .update[TestDocument](index, DocumentId("invalid-document-id"), document.copy(intField = 100)) - res <- - Executor.execute(ElasticRequest.bulk(req1, req2, req3, req4, req5, req6, req7).refreshTrue) - doc1 <- Executor.execute(ElasticRequest.getById(index, firstDocumentId)).documentAs[TestDocument] - doc2 <- Executor.execute(ElasticRequest.getById(index, secondDocumentId)).documentAs[TestDocument] - doc3 <- Executor.execute(ElasticRequest.getById(index, thirdDocumentId)).documentAs[TestDocument] - } yield assert(res.items.size)(equalTo(7)) && - assert(res.items.map(_.error.isDefined))( - equalTo(Chunk(false, false, false, false, false, false, true)) - ) && - assert(res.items(6).status)(equalTo(Some(404))) && - assert(res.items(6).error.map(_.`type`))(equalTo(Some("document_missing_exception"))) && - assert(doc3)(isSome(equalTo(document.copy(intField = 100)))) && - assert(doc2)(isNone) && assert(doc1)( - isSome(equalTo(document.copy(doubleField = 3000, intField = 100))) - ) - } - } - ), - suite("updating document")( - test("successfully update document with script") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val intField = document.intField - val factor = 2 - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) - _ <- Executor.execute( - ElasticRequest.updateByScript( - index, - documentId, - Script("ctx._source.intField += params['factor']").params("factor" -> factor) - ) - ) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(document.copy(intField = intField + factor)))) - } - }, - test("successfully create document if it does not exist") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - for { - _ <- Executor.execute( - ElasticRequest - .updateByScript( - index, - documentId, - Script("ctx._source.intField += 
params['factor']").params("factor" -> 2) - ) - .orCreate(document) - ) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(document))) - } - }, - test("successfully update document with doc") { - checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => - for { - _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) - _ <- Executor.execute(ElasticRequest.update[TestDocument](index, documentId, secondDocument)) - doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] - } yield assert(doc)(isSome(equalTo(secondDocument))) - } - } - ), - suite("updating document by query")( - test("successfully update document with only script") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val stringField = "StringField" - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, document).refreshTrue - ) - updateRes <- - Executor.execute( - ElasticRequest - .updateAllByQuery( - updateByQueryIndex, - Script("ctx._source['stringField'] = params['str']").params("str" -> stringField) - ) - .refreshTrue - ) - doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument] - } yield assert(updateRes)( - equalTo( - UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0) - ) - ) && assert(doc)(isSome(equalTo(document.copy(stringField = stringField)))) - } - }, - test("successfully update document with script and query") { - checkOnce(genDocumentId, genTestDocument) { (documentId, document) => - val newDocument = document.copy(stringField = "StringField") - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue) - _ <- Executor.execute( - ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, newDocument).refreshTrue - ) - updateRes <- - Executor.execute( - ElasticRequest - .updateByQuery( - index = updateByQueryIndex, - query = term(field = TestDocument.stringField.keyword, value = "StringField"), - script = Script("ctx._source['intField']++") - ) - .refreshTrue - ) - doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument] - } yield assert(updateRes)( - equalTo( - UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0) - ) - ) && assert(doc)(isSome(equalTo(newDocument.copy(intField = newDocument.intField + 1)))) - } - } - ), - suite("geo-distance query")( - test("using geo-distance query") { - checkOnce(genTestDocument) { document => - val indexDefinition = - """ - |{ - | "mappings": { - | "properties": { - | "geoPointField": { - | "type": "geo_point" - | } - | } - | } - |} - |""".stripMargin - - for { - _ <- Executor.execute(ElasticRequest.createIndex(geoDistanceIndex, indexDefinition)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(geoDistanceIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](geoDistanceIndex, document).refreshTrue - ) - result <- Executor - .execute( - ElasticRequest.search( - geoDistanceIndex, - ElasticQuery - .geoDistance( - "geoPointField", - GeoPoint(document.geoPointField.lat, document.geoPointField.lon), - Distance(300, Kilometers) - ) - ) - ) - 
.documentAs[TestDocument] - } yield assert(result)(equalTo(Chunk(document))) - } - } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoDistanceIndex)).orDie) - ), - suite("geo-polygon query")( - test("using geo-polygon query") { - checkOnce(genTestDocument) { document => - val indexDefinition = - """ - |{ - | "mappings": { - | "properties": { - | "geoPointField": { - | "type": "geo_point" - | } - | } - | } - |} - |""".stripMargin - - for { - _ <- Executor.execute(ElasticRequest.createIndex(geoPolygonIndex, indexDefinition)) - _ <- Executor.execute(ElasticRequest.deleteByQuery(geoPolygonIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](geoPolygonIndex, document).refreshTrue - ) - - r1 <- Executor - .execute( - ElasticRequest.search( - geoPolygonIndex, - ElasticQuery - .geoPolygon("geoPointField", Chunk("0, 0", "0, 90", "90, 90", "90, 0")) - ) - ) - .documentAs[TestDocument] - } yield assert(r1)(equalTo(Chunk(document))) - } - } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoPolygonIndex)).orDie) - ), - suite("search for documents using FunctionScore query")( - test("using randomScore function") { - checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) => - val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue - ) - _ <- Executor.execute( - ElasticRequest - .create[TestDocument]( - firstSearchIndex, - secondDocumentUpdated - ) - .refreshTrue - ) - r1 <- Executor - .execute( - ElasticRequest.search( - firstSearchIndex, - ElasticQuery - .functionScore(randomScoreFunction()) - .query(matches("stringField", firstDocument.stringField)) - ) - ) - .documentAs[TestDocument] - } yield assert(r1)( - hasSameElements(Chunk(firstDocument, secondDocumentUpdated)) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ), - test("using randomScore function and weight function") { - checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) => - val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField) - for { - _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) - _ <- Executor.execute( - ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue - ) - _ <- Executor.execute( - ElasticRequest - .create[TestDocument]( - firstSearchIndex, - secondDocumentUpdated - ) - .refreshTrue - ) - r1 <- Executor - .execute( - ElasticRequest.search( - firstSearchIndex, - ElasticQuery - .functionScore( - FunctionScoreFunction.randomScoreFunction(), - FunctionScoreFunction.weightFunction(2) - ) - .query(matches("stringField", firstDocument.stringField)) - .boost(2.0) - .boostMode(FunctionScoreBoostMode.Max) - ) - ) - .documentAs[TestDocument] - } yield assert(r1)( - hasSameElements(Chunk(firstDocument, secondDocumentUpdated)) - ) - } - } @@ around( - Executor.execute( - ElasticRequest.createIndex( - firstSearchIndex, - """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" - ) - ), - Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie - ) - ) - ) @@ nondeterministic @@ sequential 
@@ prepareElasticsearchIndexForTests @@ afterAll( - Executor.execute(ElasticRequest.deleteIndex(index)).orDie - ) - ).provideShared( - elasticsearchLayer - ) - } -} +/* + * Copyright 2022 LambdaWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package zio.elasticsearch + +import zio.Chunk +import zio.elasticsearch.ElasticAggregation._ +import zio.elasticsearch.ElasticHighlight.highlight +import zio.elasticsearch.ElasticQuery.{script => _, _} +import zio.elasticsearch.ElasticSort.sortBy +import zio.elasticsearch.aggregation.{AggregationOrder, IpRange} +import zio.elasticsearch.data.GeoPoint +import zio.elasticsearch.domain.{PartialTestDocument, TestDocument, TestSubDocument} +import zio.elasticsearch.executor.Executor +import zio.elasticsearch.query.DistanceUnit.Kilometers +import zio.elasticsearch.query.FunctionScoreFunction.randomScoreFunction +import zio.elasticsearch.query.MultiMatchType._ +import zio.elasticsearch.query.sort.SortMode.Max +import zio.elasticsearch.query.sort.SortOrder._ +import zio.elasticsearch.query.sort.SourceType.NumberType +import zio.elasticsearch.query.{Distance, FunctionScoreBoostMode, FunctionScoreFunction, InnerHits} +import zio.elasticsearch.request.{CreationOutcome, DeletionOutcome} +import zio.elasticsearch.result.{FilterAggregationResult, Item, MaxAggregationResult, UpdateByQueryResult} +import zio.elasticsearch.script.{Painless, Script} +import zio.json.ast.Json.{Arr, Str} +import zio.schema.codec.JsonCodec +import zio.stream.{Sink, ZSink} +import zio.test.Assertion._ +import zio.test.TestAspect._ +import zio.test._ + +import java.time.LocalDate +import scala.util.Random + +object HttpExecutorSpec extends IntegrationSpec { + + def spec: Spec[TestEnvironment, Any] = { + suite("Executor")( + suite("HTTP Executor")( + suite("aggregation")( + test("aggregate using avg aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(doubleField = 20)) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(doubleField = 10)) + .refreshTrue + ) + aggregation = avgAggregation(name = "aggregationDouble", field = TestDocument.doubleField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asAvgAggregation("aggregationDouble") + } yield assert(aggsRes.head.value)(equalTo(15.0)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using cardinality aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, 
secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 10)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 20)) + .refreshTrue + ) + aggregation = cardinalityAggregation(name = "aggregationInt", field = TestDocument.intField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + + cardinalityAgg <- aggsRes.asCardinalityAggregation("aggregationInt") + } yield assert(cardinalityAgg.map(_.value))(isSome(equalTo(2))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using extended stats aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 100)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 50)) + .refreshTrue + ) + aggregation = extendedStatsAggregation(name = "aggregation", field = TestDocument.intField).sigma(3) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asExtendedStatsAggregation("aggregation") + } yield assert(aggsRes.head.count)(equalTo(2)) && + assert(aggsRes.head.min)(equalTo(50.0)) && + assert(aggsRes.head.max)(equalTo(100.0)) && + assert(aggsRes.head.avg)(equalTo(75.0)) && + assert(aggsRes.head.sum)(equalTo(150.0)) && + assert(aggsRes.head.sumOfSquares)(equalTo(12500.0)) && + assert(aggsRes.head.variance)(equalTo(625.0)) && + assert(aggsRes.head.variancePopulation)(equalTo(625.0)) && + assert(aggsRes.head.varianceSampling)(equalTo(1250.0)) && + assert(aggsRes.head.stdDeviation)(equalTo(25.0)) && + assert(aggsRes.head.stdDeviationPopulation)(equalTo(25.0)) && + assert(aggsRes.head.stdDeviationSampling)(equalTo(35.35533905932738)) && + assert(aggsRes.head.stdDeviationBoundsResult.upper)(equalTo(150.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.lower)(equalTo(0.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.upperPopulation)(equalTo(150.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.lowerPopulation)(equalTo(0.0)) && + assert(aggsRes.head.stdDeviationBoundsResult.upperSampling)(equalTo(181.06601717798213)) && + assert(aggsRes.head.stdDeviationBoundsResult.lowerSampling)(equalTo(-31.066017177982133)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using filter aggregation with max aggregation as a sub aggregation") { + val expectedResult = ( + "aggregation", + FilterAggregationResult( + docCount = 2, + subAggregations = Map( + "subAggregation" -> MaxAggregationResult(value = 5.0) + ) + ) + ) + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, 
thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = firstDocument.copy(stringField = "test", intField = 7) + secondDocumentUpdated = + secondDocument.copy(stringField = "filterAggregation", intField = 3) + thirdDocumentUpdated = + thirdDocument.copy(stringField = "filterAggregation", intField = 5) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument]( + firstSearchIndex, + firstDocumentId, + firstDocumentUpdated + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocumentUpdated + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + thirdDocumentId, + thirdDocumentUpdated + ) + .refreshTrue + ) + query = term(field = TestDocument.stringField, value = secondDocumentUpdated.stringField.toLowerCase) + aggregation = + filterAggregation(name = "aggregation", query = query).withSubAgg( + maxAggregation("subAggregation", TestDocument.intField) + ) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .aggregations + + } yield assert(aggsRes.head)(equalTo(expectedResult)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using max aggregation") { + val expectedResponse = ("aggregationInt", MaxAggregationResult(value = 20.0)) + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 20)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 10)) + .refreshTrue + ) + aggregation = maxAggregation(name = "aggregationInt", field = TestDocument.intField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .aggregations + } yield assert(aggsRes.head)(equalTo(expectedResponse)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using min aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23)) + .refreshTrue + ) + aggregation = minAggregation(name = "aggregationInt", field = TestDocument.intField) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asMinAggregation("aggregationInt") + } yield assert(aggsRes.head.value)(equalTo(23.0)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + 
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using percentile ranks aggregation") { + val expectedResult = Map("500.0" -> 55.55555555555555, "600.0" -> 100.0) + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 400)) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 500)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 550)) + .refreshTrue + ) + aggregation = + percentileRanksAggregation(name = "aggregation", field = "intField", value = 500.0, values = 600.0) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asPercentileRanksAggregation("aggregation") + } yield assert(aggsRes.head.values)(equalTo(expectedResult)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asPercentilesAggregation("aggregationInt") + } yield assert(aggsRes.head.values.size)(equalTo(3)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation with multi index") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) + aggsRes <- Executor + .execute( + ElasticRequest.aggregate( + selectors = MultiIndex.names(firstSearchIndex, secondSearchIndex), + aggregation = aggregation + ) + ) + .asPercentilesAggregation("aggregationInt") + } yield assert(aggsRes.head.values.size)(equalTo(3)) + } + } @@ 
around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation with index pattern") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + percentilesAggregation(name = "aggregationInt", field = TestDocument.intField).percents(25, 50, 90) + aggsRes <- Executor + .execute( + ElasticRequest.aggregate( + selectors = IndexPatternAll, + aggregation = aggregation + ) + ) + .asPercentilesAggregation("aggregationInt") + } yield assert(aggsRes.head.values.size)(equalTo(3)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("aggregate using percentiles aggregation as sub aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + aggregation = + termsAggregation(name = "first", field = TestDocument.stringField.keyword) + .withSubAgg(percentilesAggregation(name = "second", field = TestSubDocument.intField)) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .aggregations + } yield assert(aggsRes)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using stats aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 7)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 6)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument.copy(intField = 10)) + .refreshTrue + ) + aggregation = statsAggregation(name = "aggregation", field = 
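TestDocument.intField)
+                // Sanity math for the assertions below: the indexed values are 7, 6 and 10,
+                // so count = 3, min = 6.0, max = 10.0, sum = 23.0 and avg = 23.0 / 3 = 7.666666666666667.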
+                aggsRes <-
+                  Executor
+                    .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                    .asStatsAggregation("aggregation")
+              } yield assert(aggsRes.head.count)(equalTo(3)) &&
+                assert(aggsRes.head.min)(equalTo(6.0)) &&
+                assert(aggsRes.head.max)(equalTo(10.0)) &&
+                assert(aggsRes.head.avg)(equalTo(7.666666666666667)) &&
+                assert(aggsRes.head.sum)(equalTo(23.0))
+
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("aggregate using sum aggregation") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 200))
+                     )
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 23))
+                         .refreshTrue
+                     )
+                aggregation = sumAggregation(name = "aggregationInt", field = TestDocument.intField)
+                aggsRes <-
+                  Executor
+                    .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                    .asSumAggregation("aggregationInt")
+              } yield assert(aggsRes.head.value)(equalTo(223.0))
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("aggregate using ip range aggregation") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docIdA, docA, docIdB, docB) =>
+            val updatedA = docA.copy(stringField = "192.168.1.10")
+            val updatedB = docB.copy(stringField = "192.168.1.200")
+
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+
+              _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, updatedA))
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, docIdB, updatedB)
+                       .refreshTrue
+                   )
+
+              // Note: "ipField" is assumed to be mapped as the Elasticsearch `ip` type in this
+              // index; the generated documents only populate stringField, so the test asserts
+              // no more than that a response is returned under the aggregation name.
+              aggregation = IpRange(
+                              name = "ip_range_agg",
+                              field = "ipField",
+                              ranges = Chunk(
+                                IpRange.IpRangeBound(to = Some("192.168.1.100")),
+                                IpRange.IpRangeBound(
+                                  from = Some("192.168.1.100"),
+                                  to = Some("192.168.1.255")
+                                )
+                              ),
+                              keyed = None,
+                              subAggregations = Chunk.empty
+                            )
+
+              result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+              agg    <- result.aggregation("ip_range_agg")
+            } yield assertTrue(agg.nonEmpty)
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("aggregate using ip range aggregation with CIDR masks") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docId1, doc1, docId2, doc2) =>
+            val updated1 = doc1.copy(stringField = "10.0.0.10")
+            val updated2 = doc2.copy(stringField = "10.0.0.120")
+
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+
+              _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docId1, updated1))
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .upsert[TestDocument](firstSearchIndex, docId2, updated2)
+                       .refreshTrue
+                   )
+
+              aggregation = IpRange(
+                              name = "cidr_agg",
+                              field = "ipField",
+                              ranges = Chunk(
+                                IpRange.IpRangeBound(mask = Some("10.0.0.0/25")),
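+                                // Each /25 mask is shorthand for a from/to pair over half of the
+                                // 10.0.0.0/24 block: 10.0.0.0/25 spans 10.0.0.0-10.0.0.127 (which
+                                // holds both sample addresses above), and the bound below spans
+                                // 10.0.0.128-10.0.0.255.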
+                                IpRange.IpRangeBound(mask = Some("10.0.0.128/25"))
+                              ),
+                              keyed = None,
+                              subAggregations = Chunk.empty
+                            )
+
+              result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+              agg    <- result.aggregation("cidr_agg")
+            } yield assertTrue(agg.nonEmpty)
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("aggregate using terms aggregation with max aggregation as a sub aggregation") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                _ <- Executor.execute(
+                       ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                     )
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                         .refreshTrue
+                     )
+                aggregation =
+                  termsAggregation(name = "aggregationString", field = TestDocument.stringField.keyword).withSubAgg(
+                    maxAggregation("subAggregation", TestDocument.intField)
+                  )
+                aggsRes <-
+                  Executor
+                    .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation))
+                    .aggregations
+              } yield assert(aggsRes)(isNonEmpty)
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("aggregate using missing aggregations") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                _ <- Executor.execute(
+                       ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                     )
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                         .refreshTrue
+                     )
+                aggregation = multipleAggregations.aggregations(
+                                missingAggregation(
+                                  name = "aggregationString",
+                                  field = TestDocument.stringField.keyword
+                                ),
+                                missingAggregation(name = "aggregationString", field = "stringField.keyword")
+                              )
+                aggsRes <- Executor
+                             .execute(
+                               ElasticRequest
+                                 .aggregate(selectors = firstSearchIndex, aggregation = aggregation)
+                             )
+                             .aggregations
+              } yield assert(aggsRes)(isNonEmpty)
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("aggregate using multiple terms aggregations") {
+          checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+            (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+              for {
+                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                _ <- Executor.execute(
+                       ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument)
+                     )
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument)
+                         .refreshTrue
+                     )
+                aggregation = multipleAggregations.aggregations(
+                                termsAggregation(
+                                  name = "aggregationString",
+                                  field = TestDocument.stringField.keyword
+                                ),
+                                termsAggregation(name = "aggregationInt", field = "intField.keyword")
+                              )
+                aggsRes <- Executor
+                             .execute(
+                               ElasticRequest
+                                 .aggregate(selectors = firstSearchIndex, aggregation = aggregation)
+                             )
+                             .aggregations
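+                // Per the response decoder, the returned map is keyed by aggregation name,
+                // so entries for "aggregationString" and "aggregationInt" are expected here.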
} yield assert(aggsRes)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using terms aggregation with nested max aggregation and bucket sort aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 2)) + .refreshTrue + ) + aggregation = + termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).orderBy(AggregationOrder("aggregationInt", Desc)) + .withSubAgg(maxAggregation(name = "aggregationInt", field = "intField")) + .withSubAgg( + bucketSortAggregation("aggregationBucket").sort( + ElasticSort.sortBy("aggregationInt").order(Desc) + ) + ) + .size(1) + aggsRes <- Executor + .execute( + ElasticRequest + .aggregate(selectors = firstSearchIndex, aggregation = aggregation) + ) + agg <- aggsRes.asTermsAggregation("aggregationString") + } yield assert(agg.map(_.buckets.size))(isSome(equalTo(1))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using value count aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + firstDocumentId, + firstDocument.copy(stringField = "test") + ) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocument.copy(stringField = "test") + ) + .refreshTrue + ) + aggregation = valueCountAggregation(name = "aggregation", field = TestDocument.stringField.keyword) + aggsRes <- + Executor + .execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asValueCountAggregation("aggregation") + + } yield assert(aggsRes.head.value)(equalTo(2)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("aggregate using weighted avg aggregation") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + firstDocumentId, + firstDocument.copy(doubleField = 5, intField = 2) + ) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocument.copy(doubleField = 10, intField = 3) + ) + .refreshTrue + ) + aggregation = weightedAvgAggregation( + name = "weightedAggregation", + valueField = TestDocument.doubleField, + weightField = TestDocument.intField + ) + aggsRes <- + Executor + 
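+                    // Expected weighted average: (5.0 * 2 + 10.0 * 3) / (2 + 3) = 40.0 / 5 = 8.0.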
.execute(ElasticRequest.aggregate(selectors = firstSearchIndex, aggregation = aggregation)) + .asWeightedAvgAggregation("weightedAggregation") + } yield assert(aggsRes.head.value)(equalTo(8.0)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ), + suite("search with aggregation")( + test("search for first result using match all query with multiple terms aggregations") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchAll + aggregation = termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withAgg(termsAggregation("aggregationInt", "intField")) + res <- Executor.execute( + ElasticRequest + .search( + selectors = firstSearchIndex, + query = query, + aggregation = aggregation + ) + .from(0) + .size(1) + ) + docs <- res.documentAs[TestDocument] + aggs <- res.aggregations + } yield assert(docs.length)(equalTo(1)) && assert(aggs)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test( + "search for first result using match all query with multiple terms aggregations and search after parameter" + ) { + checkOnce(genTestDocument) { firstDocument => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + reqs = (0 to 20).map { i => + ElasticRequest.create[TestDocument]( + firstSearchIndex, + firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = matchAll + aggregation = termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withAgg(termsAggregation("aggregationInt", "intField")) + res <- Executor + .execute( + ElasticRequest + .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + sa <- res.lastSortValue + res2 <- Executor + .execute( + ElasticRequest + .search(selectors = firstSearchIndex, query = query, aggregation = aggregation) + .searchAfter(sa.get) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + docs <- res2.documentAs[TestDocument] + aggs <- res2.aggregations + } yield assert(docs.length)(equalTo(10)) && assert(aggs)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search using match all query with multiple terms aggregations with descending sort on one field") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + val firstDocumentWithFixedIntField = firstDocument.copy(intField = 25) + val secondDocumentWithFixedIntField = secondDocument.copy(intField = 32) + for { + _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentWithFixedIntField) + .refreshTrue + ) + query = matchAll + aggregation = + termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withAgg(termsAggregation("aggregationInt", "intField.keyword")) + res <- Executor.execute( + ElasticRequest + .search( + selectors = firstSearchIndex, + query = query, + aggregation = aggregation + ) + .sort(sortBy(field = TestDocument.intField).order(Desc)) + ) + docs <- res.documentAs[TestDocument] + aggs <- res.aggregations + } yield assert(docs)(equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField))) && + assert(aggs)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test( + "search using match all query with terms aggregations, nested max aggregation and nested bucketSelector aggregation" + ) { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument.copy(intField = 5)) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument.copy(intField = 100)) + .refreshTrue + ) + query = matchAll + aggregation = + termsAggregation( + name = "aggregationString", + field = TestDocument.stringField.keyword + ).withSubAgg(maxAggregation(name = "aggregationInt", field = TestDocument.intField)) + .withSubAgg( + bucketSelectorAggregation( + name = "aggregationSelector", + script = Script("params.aggregation_int > 10"), + bucketsPath = Map("aggregation_int" -> "aggregationInt") + ) + ) + res <- Executor.execute( + ElasticRequest + .search( + selectors = firstSearchIndex, + query = query, + aggregation = aggregation + ) + ) + docs <- res.documentAs[TestDocument] + termsAgg <- res.asTermsAggregation("aggregationString") + } yield assert(docs)(isNonEmpty) && assert( + termsAgg.map(_.buckets.size) + )(isSome(equalTo(1))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("counting documents")( + test("successfully count documents with given query") { + checkOnce(genTestDocument) { document => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstCountIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.create[TestDocument](firstCountIndex, document).refreshTrue + ) + res <- Executor.execute(ElasticRequest.count(firstCountIndex, matchAll)) + } yield assert(res)(equalTo(1)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstCountIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstCountIndex)).orDie + ), + test("successfully count documents without given query") { + checkOnce(genTestDocument) { document => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondCountIndex, matchAll)) + _ <- Executor.execute( + 
ElasticRequest.create[TestDocument](secondCountIndex, document).refreshTrue + ) + res <- Executor.execute(ElasticRequest.count(secondCountIndex)) + } yield assert(res)(equalTo(1)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondCountIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondCountIndex)).orDie + ) + ) @@ shrinks(0), + suite("creating document")( + test("successfully create document") { + checkOnce(genTestDocument) { document => + for { + docId <- Executor.execute(ElasticRequest.create[TestDocument](index, document)) + res <- Executor.execute(ElasticRequest.getById(index, docId)).documentAs[TestDocument] + } yield assert(res)(isSome(equalTo(document))) + } + }, + test("successfully create document with ID given") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + assertZIO(Executor.execute(ElasticRequest.create[TestDocument](index, documentId, document)))( + equalTo(CreationOutcome.Created) + ) + } + }, + test("return 'AlreadyExists' if document with given ID already exists") { + checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument)) + res <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, secondDocument)) + } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) + } + } + ), + suite("creating index")( + test("successfully create index") { + assertZIO(Executor.execute(ElasticRequest.createIndex(createIndexTestName)))( + equalTo(CreationOutcome.Created) + ) + }, + test("return 'AlreadyExists' if index already exists") { + for { + _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) + res <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) + } yield assert(res)(equalTo(CreationOutcome.AlreadyExists)) + } + ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), + suite("creating or updating document")( + test("successfully create document") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(document))) + } + }, + test("successfully update document") { + checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.create[TestDocument](index, documentId, firstDocument)) + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, secondDocument)) + doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(doc)(isSome(equalTo(secondDocument))) + } + } + ), + suite("deleting document by ID")( + test("successfully delete existing document") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + res <- Executor.execute(ElasticRequest.deleteById(index, documentId)) + } yield assert(res)(equalTo(DeletionOutcome.Deleted)) + } + }, + test("return 'NotFound' if the document does not exist") { + checkOnce(genDocumentId) { documentId => + assertZIO(Executor.execute(ElasticRequest.deleteById(index, documentId)))( + equalTo(DeletionOutcome.NotFound) + ) + } + } + ), + 
suite("delete index")( + test("successfully delete existing index") { + checkOnce(genIndexName) { name => + for { + _ <- Executor.execute(ElasticRequest.createIndex(name)) + res <- Executor.execute(ElasticRequest.deleteIndex(name)) + } yield assert(res)(equalTo(DeletionOutcome.Deleted)) + } + }, + test("return 'NotFound' if index does not exists") { + checkOnce(genIndexName) { name => + assertZIO(Executor.execute(ElasticRequest.deleteIndex(name)))(equalTo(DeletionOutcome.NotFound)) + } + } + ), + suite("finding document")( + test("return true if the document exists") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + res <- Executor.execute(ElasticRequest.exists(index, documentId)) + } yield assert(res)(isTrue) + } + }, + test("return false if the document does not exist") { + checkOnce(genDocumentId) { documentId => + assertZIO(Executor.execute(ElasticRequest.exists(index, documentId)))(isFalse) + } + } + ), + suite("retrieving document by ID")( + test("successfully return document") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument] + } yield assert(res)(isSome(equalTo(document))) + } + }, + test("return None if the document does not exist") { + checkOnce(genDocumentId) { documentId => + assertZIO(Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument])(isNone) + } + }, + test("fail with throwable if decoding fails") { + checkOnce(genDocumentId, genTestDocument) { (documentId, document) => + val result = for { + _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document)) + res <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestSubDocument] + } yield res + + assertZIO(result.exit)( + fails(isSubtype[Exception](assertException("Could not parse the document: .nestedField(missing)"))) + ) + } + } + ), + suite("refresh index")( + test("successfully refresh existing index") { + assertZIO(Executor.execute(ElasticRequest.refresh(index)))(isTrue) + }, + test("successfully refresh more existing indices") { + for { + _ <- Executor.execute(ElasticRequest.createIndex(createIndexTestName)) + res <- Executor.execute(ElasticRequest.refresh(MultiIndex.names(index, createIndexTestName))) + } yield assert(res)(isTrue) + }, + test("successfully refresh all indices") { + assertZIO(Executor.execute(ElasticRequest.refresh(IndexPatternAll)))(isTrue) + }, + test("return false if index does not exists") { + assertZIO(Executor.execute(ElasticRequest.refresh(refreshFailIndex)))(isFalse) + } + ) @@ after(Executor.execute(ElasticRequest.deleteIndex(createIndexTestName)).orDie), + suite("retrieving document by IDs")( + test("find documents by ids") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ids(firstDocumentId.toString, secondDocumentId.toString) + 
res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query) + ) + items <- res.items + } yield assert(items != null)(isTrue) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ), + suite("kNN search")( + test("search for top two results") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = firstDocument.copy(vectorField = List(1, 5, -20)) + secondDocumentUpdated = secondDocument.copy(vectorField = List(42, 8, -15)) + thirdDocumentUpdated = thirdDocument.copy(vectorField = List(15, 11, 23)) + req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated) + req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated) + req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) + _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue) + query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0)) + res <- Executor.execute(ElasticRequest.knnSearch(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(equalTo(Chunk(firstDocumentUpdated, thirdDocumentUpdated))) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for top two results with filters") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = firstDocument.copy(intField = 15, vectorField = List(1, 5, -20)) + secondDocumentUpdated = secondDocument.copy(intField = 21, vectorField = List(42, 8, -15)) + thirdDocumentUpdated = thirdDocument.copy(intField = 4, vectorField = List(15, 11, 23)) + req1 = ElasticRequest.create(firstSearchIndex, firstDocumentId, firstDocumentUpdated) + req2 = ElasticRequest.create(firstSearchIndex, secondDocumentId, secondDocumentUpdated) + req3 = ElasticRequest.create(firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) + _ <- Executor.execute(ElasticRequest.bulk(req1, req2, req3).refreshTrue) + query = ElasticQuery.kNN(TestDocument.vectorField, 2, 3, Chunk(-5.0, 9.0, -12.0)) + filter = ElasticQuery.range(TestDocument.intField).gt(10) + res <- Executor + .execute(ElasticRequest.knnSearch(firstSearchIndex, query).filter(filter)) + .documentAs[TestDocument] + } yield (assert(res)(equalTo(Chunk(firstDocumentUpdated, secondDocumentUpdated)))) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "vectorField": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", "index": true } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents")( + test("search for a document using a 
boosting query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"this is a ${firstDocument.stringField} test", intField = 7) + secondDocumentUpdated = + secondDocument.copy( + stringField = s"this is another ${secondDocument.stringField} test", + intField = 5 + ) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + .refreshTrue + ) + query = boosting( + negativeBoost = 0.1f, + negativeQuery = + term(field = TestDocument.stringField, value = firstDocument.stringField.toLowerCase), + positiveQuery = matchPhrase( + field = TestDocument.stringField, + value = "test" + ) + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated)))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a constant score query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = "this is a test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = constantScore( + matchPhrase( + field = TestDocument.stringField, + value = "test" + ) + ).boost(2.1) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for first 2 documents using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query).from(0).size(2)) + .documentAs[TestDocument] + } yield assert(res.length)(equalTo(2)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + 
Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for first 2 documents using range query with date format") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + val firstDocumentUpdated = firstDocument.copy(dateField = LocalDate.now.minusDays(2)) + val secondDocumentUpdated = secondDocument.copy(dateField = LocalDate.now) + val thirdDocumentUpdated = thirdDocument.copy(dateField = LocalDate.now.plusDays(2)) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocumentUpdated) + .refreshTrue + ) + query = range(TestDocument.dateField).gte(LocalDate.now).format("yyyy-MM-dd").boost(1.0) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, thirdDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for documents with source filtering") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query).includes[PartialTestDocument]) + items <- res.items + } yield assert(items.map(item => Right(item.raw)))( + hasSameElements( + List(firstDocument, secondDocument, thirdDocument).map(document => + TestDocument.schema.migrate(PartialTestDocument.schema).flatMap(_(document)).flatMap { + partialDocument => + JsonCodec.jsonEncoder(PartialTestDocument.schema).toJsonAST(partialDocument) + } + ) + ) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("fail if an excluded source field is attempted to be decoded") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + val result = + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, 
secondDocumentId, secondDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + _ <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query).excludes("intField")) + .documentAs[TestDocument] + } yield () + + assertZIO(result.exit)( + fails( + isSubtype[Exception]( + assertException("Could not parse all documents successfully: .intField(missing)") + ) + ) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("fail if any of results cannot be decoded") { + checkOnce(genDocumentId, genDocumentId, genTestDocument, genTestSubDocument) { + (documentId, subDocumentId, document, subDocument) => + val result = + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](secondSearchIndex, documentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestSubDocument](secondSearchIndex, subDocumentId, subDocument) + .refreshTrue + ) + query = range(TestDocument.intField).gte(0) + res <- Executor.execute(ElasticRequest.search(secondSearchIndex, query)).documentAs[TestDocument] + } yield res + + assertZIO(result.exit)( + fails( + isSubtype[Exception]( + assertException("Could not parse all documents successfully: .dateField(missing)") + ) + ) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for a document which contains a specific prefix using a prefix query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.prefix( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.take(3) + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a disjunction max query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"This is a ${firstDocument.stringField} test.") + secondDocumentUpdated = + secondDocument.copy(stringField = + s"This is a ${secondDocument.stringField} test. 
It should be in the list before ${firstDocument.stringField}, because it has higher relevance score than ${firstDocument.stringField}" + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated) + ) + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + .refreshTrue + ) + query = disjunctionMax( + term( + field = TestDocument.stringField, + value = firstDocument.stringField.toLowerCase + ), + matchPhrase( + field = TestDocument.stringField, + value = secondDocument.stringField + ) + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(equalTo(Chunk(secondDocumentUpdated, firstDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a fuzzy query") { + checkOnce(genDocumentId, genTestDocument) { (firstDocumentId, firstDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument).refreshTrue + ) + query = ElasticQuery.fuzzy( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.substring(1) + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield { + assert(res)(Assertion.contains(firstDocument)) + } + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document which contains a term using a wildcard query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.contains( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.take(3) + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document which starts with a term using a wildcard query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.startsWith( + field = TestDocument.stringField.keyword, + value = firstDocument.stringField.take(3) + ) + res <- Executor + 
.execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document which conforms to a pattern using a wildcard query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = wildcard( + field = TestDocument.stringField.keyword, + value = s"${firstDocument.stringField.take(2)}*${firstDocument.stringField.takeRight(2)}" + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a match all query with index pattern") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + firstDocumentCopy = firstDocument.copy(stringField = "this is test") + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) + .refreshTrue + ) + secondDocumentCopy = secondDocument.copy(stringField = "this is test") + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) + .refreshTrue + ) + query = matchAll + res <- Executor + .execute(ElasticRequest.search(IndexPattern("search-index*"), query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( + Assertion.contains(secondDocumentCopy) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for a document using a match boolean prefix query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = "test this is boolean") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchBooleanPrefix(TestDocument.stringField, "this is test bo") + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, 
query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a match phrase query with multi index") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + firstDocumentCopy = firstDocument.copy(stringField = "this is test") + _ <- + Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentCopy) + .refreshTrue + ) + secondDocumentCopy = secondDocument.copy(stringField = "this is test") + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](secondSearchIndex, secondDocumentId, secondDocumentCopy) + .refreshTrue + ) + query = matchPhrase( + field = TestDocument.stringField, + value = firstDocumentCopy.stringField + ) + + res <- Executor + .execute(ElasticRequest.search(MultiIndex.names(firstSearchIndex, secondSearchIndex), query)) + .documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocumentCopy)) && assert(res)( + Assertion.contains(secondDocumentCopy) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for a document using a match phrase query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchPhrase( + field = TestDocument.stringField, + value = firstDocument.stringField + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(Assertion.contains(document)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a match phrase prefix query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = s"${firstDocument.stringField} test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = matchPhrasePrefix( + field = 
TestDocument.stringField, + value = s"${firstDocument.stringField} te" + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a multi match query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + document = firstDocument.copy(stringField = "test") + _ <- + Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, document)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + + query = + multiMatch(value = "test").fields(TestDocument.stringField).matchingType(BestFields) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(document)) && assert(res)(!Assertion.contains(secondDocument))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a terms query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test") + secondDocumentUpdated = + secondDocument.copy(stringField = s"this is ${secondDocument.stringField} another test") + _ <- + Executor.execute( + ElasticRequest + .bulk( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, thirdDocumentId, thirdDocument) + ) + .refreshTrue + ) + query = terms( + field = TestDocument.stringField, + values = firstDocument.stringField.toLowerCase, + secondDocument.stringField.toLowerCase + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(List(firstDocumentUpdated, secondDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a terms set query with minimumShouldMatchField") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) + secondDocumentUpdated = + secondDocument.copy( + stringField = + s"this is ${secondDocument.stringField} another test, not ${firstDocument.stringField}", + 
intField = 2 + ) + _ <- + Executor.execute( + ElasticRequest + .bulk( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + ) + .refreshTrue + ) + query = termsSet( + field = "stringField", + minimumShouldMatchField = "intField", + terms = secondDocument.stringField.toLowerCase, + firstDocument.stringField.toLowerCase + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using a terms set query with minimumShouldMatchScript") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + firstDocumentUpdated = + firstDocument.copy(stringField = s"this is ${firstDocument.stringField} test", intField = 2) + secondDocumentUpdated = + secondDocument.copy( + stringField = s"this is ${secondDocument.stringField} test, not ${firstDocument.stringField}", + intField = 2 + ) + _ <- + Executor.execute( + ElasticRequest + .bulk( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentUpdated), + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocumentUpdated) + ) + .refreshTrue + ) + query = termsSetScript( + field = TestDocument.stringField, + minimumShouldMatchScript = Script("doc['intField'].value"), + terms = firstDocument.stringField.toLowerCase, + secondDocument.stringField.toLowerCase + ) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(Chunk(secondDocumentUpdated))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using nested query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = + nested(path = TestDocument.subDocumentList, query = matchAll) + res <- + Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(List(firstDocument, secondDocument))) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using should with satisfying minimumShouldMatch condition") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should( + matches(TestDocument.stringField, firstDocument.stringField), + matches(TestDocument.intField, firstDocument.intField), + matches(TestDocument.doubleField, firstDocument.doubleField + 1) + ).minimumShouldMatch(2) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using script query") { + checkN(4)(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = ElasticQuery.script(Script("doc['booleanField'].value == true")) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(hasSameElements(List(firstDocument, secondDocument).filter(_.booleanField == true))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document that doesn't exist using regexp query without case insensitive ") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = + ElasticQuery.regexp( + field = TestDocument.stringField, + value = + s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}".toUpperCase + ) + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield assert(res)(!Assertion.contains(firstDocument)) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using regexp query with case insensitive") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = 
ElasticQuery + .regexp( + field = TestDocument.stringField, + value = s"${firstDocument.stringField.take(1)}.*${firstDocument.stringField.takeRight(1)}" + ) + .caseInsensitiveTrue + res <- Executor + .execute(ElasticRequest.search(firstSearchIndex, query)) + .documentAs[TestDocument] + } yield (assert(res)(Assertion.contains(firstDocument)) && assert(res)( + !Assertion.contains(secondDocument) + )) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for a document using should with unsatisfying minimumShouldMatch condition") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should( + matches(TestDocument.stringField, firstDocument.stringField), + matches(TestDocument.intField, firstDocument.intField + 1), + matches(TestDocument.doubleField, firstDocument.doubleField + 1) + ).minimumShouldMatch(2) + res <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)).documentAs[TestDocument] + } yield assert(res)(isEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents with inner hits")( + test("search for a document using nested query with inner hits") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = + nested(path = TestDocument.subDocumentList, query = matchAll).innerHits + result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) + items <- result.items + res = + items.map(_.innerHitAs[TestSubDocument]("subDocumentList")).collect { case Right(value) => value } + } yield assert(res)( + hasSameElements(List(firstDocument.subDocumentList, secondDocument.subDocumentList)) + ) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents with highlights")( + test("successfully find document with highlight") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, 
secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) + ) + items <- res.items + } yield assert(items.map(_.highlight("stringField")))( + hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find inner hit document with highlight") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- + Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = nested( + path = TestDocument.subDocumentList, + query = must( + matches( + TestSubDocument.stringField, + secondDocument.subDocumentList.headOption.map(_.stringField).getOrElse("foo") + ) + ) + ).innerHits( + InnerHits().highlights(highlight(TestSubDocument.stringField)) + ) + result <- Executor.execute(ElasticRequest.search(firstSearchIndex, query)) + items <- result.items + res = items + .flatMap(_.innerHit("subDocumentList")) + .flatten + .flatMap(_.highlight("subDocumentList.stringField")) + .flatten + } yield assert(res)( + Assertion.contains( + secondDocument.subDocumentList.headOption + .map(doc => s"<em>${doc.stringField}</em>") + .getOrElse("foo") + ) + ) + } + } @@ around( + Executor.execute( + ElasticRequest.createIndex( + firstSearchIndex, + """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }""" + ) + ), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlight using field accessor") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query).highlights(highlight(TestDocument.stringField)) + ) + items <- res.items + } yield assert(items.map(_.highlight(TestDocument.stringField)))( + hasSameElements(List(Some(Chunk(s"<em>${firstDocument.stringField}</em>")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlights and return highlights map successfully") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex,
firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest.search(firstSearchIndex, query).highlights(highlight("stringField")) + ) + items <- res.items + } yield assert(items.map(_.highlights))( + hasSameElements(List(Some(Map("stringField" -> Chunk(s"<em>${firstDocument.stringField}</em>"))))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlight while using global config") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest + .search(firstSearchIndex, query) + .highlights( + highlight(TestDocument.stringField) + .withGlobalConfig("pre_tags", Arr(Str("<ul>
    "))) + .withGlobalConfig("post_tags", Arr(Str("
"))) + ) + ) + items <- res.items + } yield assert(items.map(_.highlight(TestDocument.stringField)))( + hasSameElements(List(Some(Chunk(s"
<ul>${firstDocument.stringField}</ul>
")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("successfully find document with highlight while using local config to overwrite global config") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = should(matches("stringField", firstDocument.stringField)) + res <- + Executor.execute( + ElasticRequest + .search(firstSearchIndex, query) + .highlights( + highlight( + TestDocument.stringField, + config = Map("pre_tags" -> Arr(Str("
    ")), "post_tags" -> Arr(Str("
"))) + ) + .withGlobalConfig("pre_tags", Arr(Str("
    "))) + .withGlobalConfig("post_tags", Arr(Str("
"))) + ) + ) + items <- res.items + } yield assert(items.map(_.highlight(TestDocument.stringField)))( + hasSameElements(List(Some(Chunk(s"
<ol>${firstDocument.stringField}</ol>
")))) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ), + suite("searching for sorted documents")( + test("search for document sorted by descending age and by ascending birthDate using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + val firstDocumentWithFixedIntField = + firstDocument.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) + val secondDocumentWithFixedIntField = + secondDocument.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocumentWithFixedIntField + ) + .refreshTrue + ) + query = range(TestDocument.intField).gte(20) + res <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .sort( + sortBy(TestDocument.intField).order(Desc), + sortBy(TestDocument.dateField).order(Asc).format("strict_date_optional_time_nanos") + ) + ) + .documentAs[TestDocument] + } yield assert(res)( + equalTo(Chunk(secondDocumentWithFixedIntField, firstDocumentWithFixedIntField)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for document sorted by script where age is ascending using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstEmployee, secondDocumentId, secondEmployee) => + val firstDocumentWithFixedIntField = + firstEmployee.copy(intField = 30, dateField = LocalDate.parse("1993-12-05")) + val secondDocumentWithFixedIntField = + secondEmployee.copy(intField = 36, dateField = LocalDate.parse("1987-12-05")) + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocumentWithFixedIntField) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + firstSearchIndex, + secondDocumentId, + secondDocumentWithFixedIntField + ) + .refreshTrue + ) + query = range(TestDocument.intField).gte(20) + res <- + Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .sort(sortBy(Script("doc['intField'].value").lang(Painless), NumberType).order(Asc)) + ) + .documentAs[TestDocument] + } yield assert(res)( + equalTo(Chunk(firstDocumentWithFixedIntField, secondDocumentWithFixedIntField)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for document sorted descending with 'max' mode by one field using matchAll query") { + checkOnce(genDocumentId, genTestSubDocument, genDocumentId, genTestSubDocument) { + (firstDocumentId, firstSubDocument, secondDocumentId, secondSubDocument) => + val firstSubDocumentWithFixedIntList = firstSubDocument.copy(intFieldList = List(11, 4, 37)) + val secondSubDocumentWithFixedIntList = secondSubDocument.copy(intFieldList = List(30, 29, 35)) + for { + _ <- 
Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .upsert[TestSubDocument](firstSearchIndex, firstDocumentId, firstSubDocumentWithFixedIntList) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestSubDocument]( + firstSearchIndex, + secondDocumentId, + secondSubDocumentWithFixedIntList + ) + .refreshTrue + ) + query = matchAll + res <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .sort(sortBy(TestSubDocument.intFieldList).mode(Max).order(Desc)) + ) + .documentAs[TestSubDocument] + } yield assert(res)( + equalTo(Chunk(firstSubDocumentWithFixedIntList, secondSubDocumentWithFixedIntList)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents using scroll API and returning them as a stream")( + test("search for documents using range query") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument) => + val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, firstDocument) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument](firstSearchIndex, secondDocumentId, secondDocument) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) + } yield assert(res)(isNonEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ), + test("search for documents using range query with multiple pages") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 203).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .stream( + ElasticRequest.search(secondSearchIndex, query) + ) + .run(sink) + } yield assert(res)(hasSize(equalTo(204))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for documents using range query with multiple pages and return type") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = + ZSink.collectAll[TestDocument] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .streamAs[TestDocument](ElasticRequest.search(secondSearchIndex, query)) + 
.run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("search for documents using range query - empty stream") { + val sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor.stream(ElasticRequest.search(firstSearchIndex, query)).run(sink) + } yield assert(res)(hasSize(equalTo(0))) + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents using PIT (point in time) and returning them as a stream")( + test("successfully create PIT and return stream results") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = + ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) + .run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test( + "successfully create PIT and return stream results with changed page size and different keep alive parameters" + ) { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = + ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .stream( + ElasticRequest.search(secondSearchIndex, query), + StreamConfig.SearchAfter.withPageSize(40).keepAliveFor("2m") + ) + .run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("successfully create PIT(point in time) and return stream results as specific type") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, TestDocument, Nothing, Chunk[TestDocument]] = + ZSink.collectAll[TestDocument] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + reqs = (0 to 200).map { _ => + ElasticRequest.create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.doubleField).gte(100.0) + res <- Executor + .streamAs[TestDocument]( + 
ElasticRequest.search(secondSearchIndex, query), + StreamConfig.SearchAfter + ) + .run(sink) + } yield assert(res)(hasSize(equalTo(201))) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ), + test("successfully create point in time and return empty stream if there is no valid results") { + checkOnce(genTestDocument) { document => + def sink: Sink[Throwable, Item, Nothing, Chunk[Item]] = + ZSink.collectAll[Item] + + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(secondSearchIndex, matchAll)) + _ <- Executor.execute( + ElasticRequest + .create[TestDocument]( + secondSearchIndex, + document.copy(stringField = Random.alphanumeric.take(5).mkString, doubleField = 150) + ) + .refreshTrue + ) + query = range(TestDocument.doubleField).gte(200.0) + res <- Executor + .stream(ElasticRequest.search(secondSearchIndex, query), StreamConfig.SearchAfter) + .run(sink) + } yield assert(res)(isEmpty) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(secondSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(secondSearchIndex)).orDie + ) + ) @@ shrinks(0), + suite("searching for documents using SearchAfter Query")( + test("search for document sorted by ascending age while using search after query") { + checkOnce(genTestDocument) { firstDocument => + for { + _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll)) + reqs = (0 to 100).map { i => + ElasticRequest.create[TestDocument]( + firstSearchIndex, + firstDocument.copy(stringField = Random.alphanumeric.take(5).mkString, intField = i) + ) + } + _ <- Executor.execute(ElasticRequest.bulk(reqs: _*).refreshTrue) + query = range(TestDocument.intField).gte(10) + res <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + sa <- res.lastSortValue + res2 <- Executor + .execute( + ElasticRequest + .search(firstSearchIndex, query) + .searchAfter(sa.get) + .size(10) + .sort( + sortBy(TestDocument.intField).order(Asc) + ) + ) + .documentAs[TestDocument] + } yield assert(res2.map(_.intField))( + equalTo(Chunk.fromIterable(20 to 29)) + ) + } + } @@ around( + Executor.execute(ElasticRequest.createIndex(firstSearchIndex)), + Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie + ) @@ shrinks(0) + ), + suite("deleting by query")( + test("successfully delete all matched documents") { + checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument, genDocumentId, genTestDocument) { + (firstDocumentId, firstDocument, secondDocumentId, secondDocument, thirdDocumentId, thirdDocument) => + for { + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + deleteByQueryIndex, + firstDocumentId, + firstDocument.copy(doubleField = 150) + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + deleteByQueryIndex, + secondDocumentId, + secondDocument.copy(doubleField = 350) + ) + ) + _ <- Executor.execute( + ElasticRequest + .upsert[TestDocument]( + deleteByQueryIndex, + thirdDocumentId, + thirdDocument.copy(doubleField = 400) + ) + .refreshTrue + ) + deleteQuery = range(TestDocument.doubleField).gte(300.0) + _ <- Executor + .execute(ElasticRequest.deleteByQuery(deleteByQueryIndex, deleteQuery).refreshTrue) + res <- Executor + .execute(ElasticRequest.search(deleteByQueryIndex, matchAll)) + .documentAs[TestDocument] + } yield 
             assert(res)(hasSameElements(List(firstDocument.copy(doubleField = 150))))
+          }
+        } @@ around(
+          Executor.execute(ElasticRequest.createIndex(deleteByQueryIndex)),
+          Executor.execute(ElasticRequest.deleteIndex(deleteByQueryIndex)).orDie
+        ),
+        test("returns NotFound when provided index is missing") {
+          checkOnce(genIndexName) { missingIndex =>
+            assertZIO(Executor.execute(ElasticRequest.deleteByQuery(missingIndex, matchAll)))(
+              equalTo(DeletionOutcome.NotFound)
+            )
+          }
+        }
+      ),
+      suite("bulk query")(
+        test("successfully execute bulk query") {
+          checkOnce(genDocumentId, genDocumentId, genDocumentId, genTestDocument) {
+            (firstDocumentId, secondDocumentId, thirdDocumentId, document) =>
+              for {
+                _ <- Executor.execute(
+                       ElasticRequest
+                         .create[TestDocument](index, firstDocumentId, document.copy(stringField = "randomIdString"))
+                     )
+                _ <-
+                  Executor.execute(
+                    ElasticRequest
+                      .create[TestDocument](index, secondDocumentId, document.copy(stringField = "randomIdString2"))
+                      .refreshTrue
+                  )
+                req1 = ElasticRequest.create[TestDocument](index, thirdDocumentId, document)
+                req2 = ElasticRequest.create[TestDocument](index, document.copy(stringField = "randomIdString3"))
+                req3 = ElasticRequest.upsert[TestDocument](index, firstDocumentId, document.copy(doubleField = 3000))
+                req4 = ElasticRequest.deleteById(index, secondDocumentId)
+                req5 = ElasticRequest.update[TestDocument](index, thirdDocumentId, document.copy(intField = 100))
+                req6 = ElasticRequest.updateByScript(
+                         index,
+                         firstDocumentId,
+                         Script("ctx._source.intField = params['factor']").params("factor" -> 100)
+                       )
+                req7 =
+                  ElasticRequest
+                    .update[TestDocument](index, DocumentId("invalid-document-id"), document.copy(intField = 100))
+                res <-
+                  Executor.execute(ElasticRequest.bulk(req1, req2, req3, req4, req5, req6, req7).refreshTrue)
+                doc1 <- Executor.execute(ElasticRequest.getById(index, firstDocumentId)).documentAs[TestDocument]
+                doc2 <- Executor.execute(ElasticRequest.getById(index, secondDocumentId)).documentAs[TestDocument]
+                doc3 <- Executor.execute(ElasticRequest.getById(index, thirdDocumentId)).documentAs[TestDocument]
+              } yield assert(res.items.size)(equalTo(7)) &&
+                assert(res.items.map(_.error.isDefined))(
+                  equalTo(Chunk(false, false, false, false, false, false, true))
+                ) &&
+                assert(res.items(6).status)(equalTo(Some(404))) &&
+                assert(res.items(6).error.map(_.`type`))(equalTo(Some("document_missing_exception"))) &&
+                assert(doc3)(isSome(equalTo(document.copy(intField = 100)))) &&
+                assert(doc2)(isNone) && assert(doc1)(
+                  isSome(equalTo(document.copy(doubleField = 3000, intField = 100)))
+                )
+          }
+        }
+      ),
+      suite("updating document")(
+        test("successfully update document with script") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            val intField = document.intField
+            val factor   = 2
+            for {
+              _ <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, document))
+              _ <- Executor.execute(
+                     ElasticRequest.updateByScript(
+                       index,
+                       documentId,
+                       Script("ctx._source.intField += params['factor']").params("factor" -> factor)
+                     )
+                   )
+              doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument]
+            } yield assert(doc)(isSome(equalTo(document.copy(intField = intField + factor))))
+          }
+        },
+        test("successfully create document if it does not exist") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            for {
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .updateByScript(
+                         index,
+                         documentId,
+                         Script("ctx._source.intField += params['factor']").params("factor" -> 2)
+                       )
+                       .orCreate(document)
+                   )
+              doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument]
+            } yield assert(doc)(isSome(equalTo(document)))
+          }
+        },
+        test("successfully update document with doc") {
+          checkOnce(genDocumentId, genTestDocument, genTestDocument) { (documentId, firstDocument, secondDocument) =>
+            for {
+              _   <- Executor.execute(ElasticRequest.upsert[TestDocument](index, documentId, firstDocument))
+              _   <- Executor.execute(ElasticRequest.update[TestDocument](index, documentId, secondDocument))
+              doc <- Executor.execute(ElasticRequest.getById(index, documentId)).documentAs[TestDocument]
+            } yield assert(doc)(isSome(equalTo(secondDocument)))
+          }
+        }
+      ),
+      suite("updating document by query")(
+        test("successfully update document with only script") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            val stringField = "StringField"
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue)
+              _ <- Executor.execute(
+                     ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, document).refreshTrue
+                   )
+              updateRes <-
+                Executor.execute(
+                  ElasticRequest
+                    .updateAllByQuery(
+                      updateByQueryIndex,
+                      Script("ctx._source['stringField'] = params['str']").params("str" -> stringField)
+                    )
+                    .refreshTrue
+                )
+              doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument]
+            } yield assert(updateRes)(
+              equalTo(
+                UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0)
+              )
+            ) && assert(doc)(isSome(equalTo(document.copy(stringField = stringField))))
+          }
+        },
+        test("successfully update document with script and query") {
+          checkOnce(genDocumentId, genTestDocument) { (documentId, document) =>
+            val newDocument = document.copy(stringField = "StringField")
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(updateByQueryIndex, matchAll).refreshTrue)
+              _ <- Executor.execute(
+                     ElasticRequest.upsert[TestDocument](updateByQueryIndex, documentId, newDocument).refreshTrue
+                   )
+              updateRes <-
+                Executor.execute(
+                  ElasticRequest
+                    .updateByQuery(
+                      index = updateByQueryIndex,
+                      query = term(field = TestDocument.stringField.keyword, value = "StringField"),
+                      script = Script("ctx._source['intField']++")
+                    )
+                    .refreshTrue
+                )
+              doc <- Executor.execute(ElasticRequest.getById(updateByQueryIndex, documentId)).documentAs[TestDocument]
+            } yield assert(updateRes)(
+              equalTo(
+                UpdateByQueryResult(took = updateRes.took, total = 1, updated = 1, deleted = 0, versionConflicts = 0)
+              )
+            ) && assert(doc)(isSome(equalTo(newDocument.copy(intField = newDocument.intField + 1))))
+          }
+        }
+      ),
+      suite("geo-distance query")(
+        test("using geo-distance query") {
+          checkOnce(genTestDocument) { document =>
+            val indexDefinition =
+              """
+                |{
+                |  "mappings": {
+                |    "properties": {
+                |      "geoPointField": {
+                |        "type": "geo_point"
+                |      }
+                |    }
+                |  }
+                |}
+                |""".stripMargin
+
+            for {
+              _ <- Executor.execute(ElasticRequest.createIndex(geoDistanceIndex, indexDefinition))
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(geoDistanceIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.create[TestDocument](geoDistanceIndex, document).refreshTrue
+                   )
+              result <- Executor
+                          .execute(
+                            ElasticRequest.search(
+                              geoDistanceIndex,
+                              ElasticQuery
+                                .geoDistance(
+                                  "geoPointField",
+                                  GeoPoint(document.geoPointField.lat, document.geoPointField.lon),
+                                  Distance(300, Kilometers)
+                                )
+                            )
+                          )
+                          .documentAs[TestDocument]
+            } yield assert(result)(equalTo(Chunk(document)))
+          }
+        } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoDistanceIndex)).orDie)
+      ),
+      suite("geo-polygon query")(
+        test("using geo-polygon query") {
+          checkOnce(genTestDocument) { document =>
+            val indexDefinition =
+              """
+                |{
+                |  "mappings": {
+                |    "properties": {
+                |      "geoPointField": {
+                |        "type": "geo_point"
+                |      }
+                |    }
+                |  }
+                |}
+                |""".stripMargin
+
+            for {
+              _ <- Executor.execute(ElasticRequest.createIndex(geoPolygonIndex, indexDefinition))
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(geoPolygonIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.create[TestDocument](geoPolygonIndex, document).refreshTrue
+                   )
+
+              r1 <- Executor
+                      .execute(
+                        ElasticRequest.search(
+                          geoPolygonIndex,
+                          ElasticQuery
+                            .geoPolygon("geoPointField", Chunk("0, 0", "0, 90", "90, 90", "90, 0"))
+                        )
+                      )
+                      .documentAs[TestDocument]
+            } yield assert(r1)(equalTo(Chunk(document)))
+          }
+        } @@ after(Executor.execute(ElasticRequest.deleteIndex(geoPolygonIndex)).orDie)
+      ),
+      suite("search for documents using FunctionScore query")(
+        test("using randomScore function") {
+          checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) =>
+            val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField)
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue
+                   )
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .create[TestDocument](
+                         firstSearchIndex,
+                         secondDocumentUpdated
+                       )
+                       .refreshTrue
+                   )
+              r1 <- Executor
+                      .execute(
+                        ElasticRequest.search(
+                          firstSearchIndex,
+                          ElasticQuery
+                            .functionScore(randomScoreFunction())
+                            .query(matches("stringField", firstDocument.stringField))
+                        )
+                      )
+                      .documentAs[TestDocument]
+            } yield assert(r1)(
+              hasSameElements(Chunk(firstDocument, secondDocumentUpdated))
+            )
+          }
+        } @@ around(
+          Executor.execute(
+            ElasticRequest.createIndex(
+              firstSearchIndex,
+              """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }"""
+            )
+          ),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        ),
+        test("using randomScore function and weight function") {
+          checkOnce(genTestDocument, genTestDocument) { (firstDocument, secondDocument) =>
+            val secondDocumentUpdated = secondDocument.copy(stringField = firstDocument.stringField)
+            for {
+              _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+              _ <- Executor.execute(
+                     ElasticRequest.create[TestDocument](firstSearchIndex, firstDocument).refreshTrue
+                   )
+              _ <- Executor.execute(
+                     ElasticRequest
+                       .create[TestDocument](
+                         firstSearchIndex,
+                         secondDocumentUpdated
+                       )
+                       .refreshTrue
+                   )
+              r1 <- Executor
+                      .execute(
+                        ElasticRequest.search(
+                          firstSearchIndex,
+                          ElasticQuery
+                            .functionScore(
+                              FunctionScoreFunction.randomScoreFunction(),
+                              FunctionScoreFunction.weightFunction(2)
+                            )
+                            .query(matches("stringField", firstDocument.stringField))
+                            .boost(2.0)
+                            .boostMode(FunctionScoreBoostMode.Max)
+                        )
+                      )
+                      .documentAs[TestDocument]
+            } yield assert(r1)(
+              hasSameElements(Chunk(firstDocument, secondDocumentUpdated))
+            )
+          }
+        } @@ around(
+          Executor.execute(
+            ElasticRequest.createIndex(
+              firstSearchIndex,
+              """{ "mappings": { "properties": { "subDocumentList": { "type": "nested" } } } }"""
+            )
+          ),
+          Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
+        )
+      )
+    ) @@ nondeterministic @@ sequential @@ prepareElasticsearchIndexForTests @@ afterAll(
+      Executor.execute(ElasticRequest.deleteIndex(index)).orDie
+    )
+  ).provideShared(
+    elasticsearchLayer
+  )
+  }
+}

From 390ef220fed551c7af480fad8494bd66be8393f5 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Thu, 19 Jun 2025 16:41:11 +0200
Subject: [PATCH 12/15] Add docs.

---
 .../elstic_aggregation_ip_range.md            | 60 +++++++++++++++++++
 1 file changed, 60 insertions(+)
 create mode 100644 docs/overview/aggregations/elstic_aggregation_ip_range.md

diff --git a/docs/overview/aggregations/elstic_aggregation_ip_range.md b/docs/overview/aggregations/elstic_aggregation_ip_range.md
new file mode 100644
index 000000000..19d11af25
--- /dev/null
+++ b/docs/overview/aggregations/elstic_aggregation_ip_range.md
@@ -0,0 +1,60 @@
+---
+id: elastic_aggregation_ip_range
+title: "Ip Range Aggregation"
+---
+
+The `Ip Range` aggregation is a multi-bucket aggregation that creates buckets for ranges of IP addresses, either using `from`/`to` values or CIDR masks.
+
+In order to use the `Ip Range` aggregation, import the following:
+```scala
+import zio.elasticsearch.aggregation.IpRangeAggregation
+import zio.elasticsearch.ElasticAggregation.ipRangeAggregation
+```
+
+You can create a [type-safe](https://lambdaworks.github.io/zio-elasticsearch/overview/overview_zio_prelude_schema) `IpRange` aggregation using the `ipRangeAggregation` method this way:
+```scala
+val aggregation: IpRangeAggregation =
+  ipRangeAggregation(
+    name = "ip_range_agg",
+    field = Document.stringField,
+    ranges = Chunk(
+      IpRange.IpRangeBound(to = Some("10.0.0.5")),
+      IpRange.IpRangeBound(from = Some("10.0.0.5"))
+    )
+  )
+```
+
+You can create an IpRangeAggregation using the ipRangeAggregation method in the following way:
+```scala
+val aggregation: IpRangeAggregation =
+  ipRangeAggregation(
+    name = "ip_range_agg",
+    field = "ipField",
+    ranges = Chunk(
+      IpRange.IpRangeBound(to = Some("10.0.0.5")),
+      IpRange.IpRangeBound(from = Some("10.0.0.5"))
+    )
+  )
+```
+
+You can also use CIDR masks for ranges:
+```scala
+val cidrAggregation: IpRangeAggregation =
+  ipRangeAggregation(
+    name = "cidr_agg",
+    field = "ipField",
+    ranges = Chunk(
+      IpRange.IpRangeBound(mask = Some("10.0.0.0/25")),
+      IpRange.IpRangeBound(mask = Some("10.0.0.128/25"))
+    )
+  )
+```
+
+If you want to explicitly set the keyed property:
+```scala
+val multipleAggregations =
+  ipRangeAggregation("ip_range_agg", "ipField", Chunk(IpRange.IpRangeBound(to = Some("10.0.0.5"))))
+    .withAgg(maxAggregation("maxAgg", "someField"))
+```
+
+You can find more information about the `Ip Range` aggregation [here](https://www.elastic.co/docs/reference/aggregations/search-aggregations-bucket-iprange-aggregation).
\ No newline at end of file

From bfb9ea0253cbcae4b730d15189a283c2c3a9b40e Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Thu, 19 Jun 2025 16:47:40 +0200
Subject: [PATCH 13/15] Add to sidebars.js

---
 ...c_aggregation_ip_range.md => elastic_aggregation_ip_range.md} | 0
 website/sidebars.js                                              | 1 +
 2 files changed, 1 insertion(+)
 rename docs/overview/aggregations/{elstic_aggregation_ip_range.md => elastic_aggregation_ip_range.md} (100%)

diff --git a/docs/overview/aggregations/elstic_aggregation_ip_range.md b/docs/overview/aggregations/elastic_aggregation_ip_range.md
similarity index 100%
rename from docs/overview/aggregations/elstic_aggregation_ip_range.md
rename to docs/overview/aggregations/elastic_aggregation_ip_range.md

diff --git a/website/sidebars.js b/website/sidebars.js
index 8c11fe381..efa03144f 100644
--- a/website/sidebars.js
+++ b/website/sidebars.js
@@ -53,6 +53,7 @@ module.exports = {
         'overview/aggregations/elastic_aggregation_cardinality',
         'overview/aggregations/elastic_aggregation_extended_stats',
         'overview/aggregations/elastic_aggregation_filter',
+        'overview/aggregations/elastic_aggregation_ip_range',
         'overview/aggregations/elastic_aggregation_max',
         'overview/aggregations/elastic_aggregation_min',
         'overview/aggregations/elastic_aggregation_missing',

From ed07116f262d4183b7f83fb5779153714482f579 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Fri, 20 Jun 2025 12:58:04 +0200
Subject: [PATCH 14/15] Use NonEmptyChunk for ranges, add keyedOn and
 IpRangeBound builders, refine tests and docs.

---
 .../elastic_aggregation_ip_range.md           |  3 +-
 .../zio/elasticsearch/HttpExecutorSpec.scala  | 76 ++++++++++---------
 .../elasticsearch/ElasticAggregation.scala    |  7 +-
 .../aggregation/Aggregations.scala            | 20 +++--
 .../ElasticAggregationSpec.scala              | 43 +++++------
 5 files changed, 81 insertions(+), 68 deletions(-)

diff --git a/docs/overview/aggregations/elastic_aggregation_ip_range.md b/docs/overview/aggregations/elastic_aggregation_ip_range.md
index 19d11af25..f4088648b 100644
--- a/docs/overview/aggregations/elastic_aggregation_ip_range.md
+++ b/docs/overview/aggregations/elastic_aggregation_ip_range.md
@@ -24,7 +24,7 @@ val aggregation: IpRangeAggregation =
   )
 ```
 
-You can create an IpRangeAggregation using the ipRangeAggregation method in the following way:
+You can also create an `IpRange` aggregation using the `ipRangeAggregation` method this way:
 ```scala
 val aggregation: IpRangeAggregation =
   ipRangeAggregation(
@@ -54,6 +54,7 @@ If you want to explicitly set the keyed property:
 ```scala
 val multipleAggregations =
   ipRangeAggregation("ip_range_agg", "ipField", Chunk(IpRange.IpRangeBound(to = Some("10.0.0.5"))))
+    .keyedOn
     .withAgg(maxAggregation("maxAgg", "someField"))
 ```

diff --git a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
index 160aef368..bbbb25e46 100644
--- a/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
+++ b/modules/integration/src/test/scala/zio/elasticsearch/HttpExecutorSpec.scala
@@ -16,11 +16,11 @@
 
 package zio.elasticsearch
 
-import zio.Chunk
 import zio.elasticsearch.ElasticAggregation._
 import zio.elasticsearch.ElasticHighlight.highlight
 import zio.elasticsearch.ElasticQuery.{script => _, _}
 import zio.elasticsearch.ElasticSort.sortBy
+import zio.elasticsearch.aggregation.IpRange.IpRangeBound
 import zio.elasticsearch.aggregation.{AggregationOrder, IpRange}
 import zio.elasticsearch.data.GeoPoint
 import zio.elasticsearch.domain.{PartialTestDocument, TestDocument, TestSubDocument}
 import zio.stream.{Sink, ZSink}
 import zio.test.Assertion._
 import zio.test.TestAspect._
 import zio.test._
+import zio.{Chunk, NonEmptyChunk}
 
 import java.time.LocalDate
 import scala.util.Random
@@ -467,51 +468,51 @@ object HttpExecutorSpec extends IntegrationSpec {
             Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
             Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
           ),
-          test("aggregate using ip range aggregation") {
-            checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docIdA, docA, docIdB, docB) =>
-              val updatedA = docA.copy(stringField = "192.168.1.10")
-              val updatedB = docB.copy(stringField = "192.168.1.200")
+          test("aggregate using IpRange aggregation") {
+            checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) {
+              (firstDocumentId, firstDocument, secondDocumentId, secondDocument) =>
+                val updatedA = firstDocument.copy(stringField = "10.0.0.10")
+                val updatedB = secondDocument.copy(stringField = "10.0.0.200")
 
-              for {
-                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
+                for {
+                  _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
 
-                _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docIdA, updatedA))
-                _ <- Executor.execute(
-                       ElasticRequest
-                         .upsert[TestDocument](firstSearchIndex, docIdB, updatedB)
-                         .refreshTrue
-                     )
+                  _ <-
+                    Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, firstDocumentId, updatedA))
+                  _ <- Executor.execute(
+                         ElasticRequest
+                           .upsert[TestDocument](firstSearchIndex, secondDocumentId, updatedB)
+                           .refreshTrue
+                       )
 
-                aggregation = IpRange(
-                  name = "ip_range_agg",
-                  field = "ipField",
-                  ranges = Chunk(
-                    IpRange.IpRangeBound(to = Some("192.168.1.100")),
-                    IpRange.IpRangeBound(
-                      from = Some("192.168.1.100"),
-                      to = Some("192.168.1.255")
-                    )
-                  ),
-                  keyed = None,
-                  subAggregations = None
-                )
+                  aggregation = IpRange(
+                                  name = "ip_ranges",
+                                  field = "ipField",
+                                  ranges = NonEmptyChunk(
+                                    IpRangeBound(to = Some("10.0.0.5")),
+                                    IpRangeBound(from = Some("10.0.0.5"))
+                                  ),
+                                  keyed = None,
+                                  subAggregations = None
+                                )
 
-                result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
-                agg <- result.aggregation("ip_range_agg")
-              } yield assertTrue(agg.nonEmpty)
+                  result <-
+                    Executor
+                      .execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+                      .aggregations
+                } yield assert(result)(isNonEmpty)
             }
          } @@ around(
            Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
            Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
          ),
-          test("aggregate using ip range aggregation with CIDR masks") {
+          test("aggregate using Ip range aggregation with CIDR masks") {
            checkOnce(genDocumentId, genTestDocument, genDocumentId, genTestDocument) { (docId1, doc1, docId2, doc2) =>
              val updated1 = doc1.copy(stringField = "10.0.0.10")
              val updated2 = doc2.copy(stringField = "10.0.0.120")
 
              for {
                _ <- Executor.execute(ElasticRequest.deleteByQuery(firstSearchIndex, matchAll))
-
                _ <- Executor.execute(ElasticRequest.upsert[TestDocument](firstSearchIndex, docId1, updated1))
                _ <- Executor.execute(
                       ElasticRequest
                         .upsert[TestDocument](firstSearchIndex, docId2, updated2)
                         .refreshTrue
                     )
 
                aggregation = IpRange(
                                name = "cidr_agg",
                                field = "ipField",
-                                ranges = Chunk(
-                                  IpRange.IpRangeBound(mask = Some("10.0.0.0/25")),
-                                  IpRange.IpRangeBound(mask = Some("10.0.0.128/25"))
+                                ranges = NonEmptyChunk(
+                                  IpRangeBound(mask = Some("10.0.0.0/25")),
+                                  IpRangeBound(mask = Some("10.0.0.128/25"))
                                ),
                                keyed = None,
                                subAggregations = None
                              )
 
-                result <- Executor.execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
-                agg <- result.aggregation("cidr_agg")
-              } yield assertTrue(agg.nonEmpty)
+                result <- Executor
+                            .execute(ElasticRequest.aggregate(firstSearchIndex, aggregation))
+                            .aggregations
+              } yield assert(result)(isNonEmpty)
            }
          } @@ around(
            Executor.execute(ElasticRequest.createIndex(firstSearchIndex)),
            Executor.execute(ElasticRequest.deleteIndex(firstSearchIndex)).orDie
          ),

diff --git a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
index 0b52c4978..5e4a2fbff 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/ElasticAggregation.scala
@@ -16,10 +16,11 @@
 
 package zio.elasticsearch
 
-import zio.Chunk
+import zio.elasticsearch.aggregation.IpRange.IpRangeBound
 import zio.elasticsearch.aggregation._
 import zio.elasticsearch.query.ElasticQuery
 import zio.elasticsearch.script.Script
+import zio.{Chunk, NonEmptyChunk}
 
 object ElasticAggregation {
 
@@ -178,7 +179,7 @@ object ElasticAggregation {
   def ipRangeAggregation(
     name: String,
     field: Field[_, String],
-    ranges: Chunk[IpRange.IpRangeBound]
+    ranges: NonEmptyChunk[IpRangeBound]
   ): IpRangeAggregation =
     IpRange(name = name, field = field.toString, ranges = ranges, keyed = None, subAggregations = None)
 
@@ -196,7 +197,7 @@ object ElasticAggregation {
    * @return
    *   An instance of [[IpRangeAggregation]] configured with the provided parameters.
    */
-  def ipRangeAggregation(name: String, field: String, ranges: Chunk[IpRange.IpRangeBound]): IpRangeAggregation =
+  def ipRangeAggregation(name: String, field: String, ranges: NonEmptyChunk[IpRangeBound]): IpRangeAggregation =
     IpRange(name = name, field = field, ranges = ranges, keyed = None, subAggregations = None)
 
   /**

diff --git a/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala b/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala
index 935d6eaea..476979b4e 100644
--- a/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala
+++ b/modules/library/src/main/scala/zio/elasticsearch/aggregation/Aggregations.scala
@@ -16,15 +16,16 @@
 
 package zio.elasticsearch.aggregation
 
-import zio.Chunk
 import zio.elasticsearch.ElasticAggregation.multipleAggregations
 import zio.elasticsearch.ElasticPrimitive.ElasticPrimitiveOps
+import zio.elasticsearch.aggregation.IpRange.IpRangeBound
 import zio.elasticsearch.aggregation.options._
 import zio.elasticsearch.query.ElasticQuery
 import zio.elasticsearch.query.sort.Sort
 import zio.elasticsearch.script.Script
 import zio.json.ast.Json
 import zio.json.ast.Json.{Arr, Obj}
+import zio.{Chunk, NonEmptyChunk}
 
 sealed trait ElasticAggregation { self =>
   private[elasticsearch] def toJson: Json
@@ -207,15 +208,15 @@ private[elasticsearch] final case class Filter(
 
 sealed trait IpRangeAggregation extends SingleElasticAggregation with WithAgg with WithSubAgg[IpRangeAggregation]
 
-final case class IpRange(
+private[elasticsearch] final case class IpRange(
   name: String,
   field: String,
-  ranges: Chunk[IpRange.IpRangeBound],
+  ranges: NonEmptyChunk[IpRangeBound],
   keyed: Option[Boolean],
   subAggregations: Option[Chunk[SingleElasticAggregation]]
 ) extends IpRangeAggregation { self =>
 
-  def keyed(value: Boolean): IpRangeAggregation = self.copy(keyed = Some(value))
+  def keyedOn: IpRangeAggregation = self.copy(keyed = Some(true))
 
   def withAgg(aggregation: SingleElasticAggregation): MultipleAggregations =
     multipleAggregations.aggregations(self, aggregation)
@@ -248,7 +249,16 @@ object IpRange {
     to: Option[String] = None,
     mask: Option[String] = None,
     key: Option[String] = None
-  ) {
+  ) { self =>
+
+    def from(value: String): IpRangeBound = self.copy(from = Some(value))
+
+    def to(value: String): IpRangeBound = self.copy(to = Some(value))
+
+    def mask(value: String): IpRangeBound = self.copy(mask = Some(value))
+
+    def key(value: String): IpRangeBound = self.copy(key = Some(value))
+
     def toJson: Json = {
       val baseFields = Chunk.empty[(String, Json)] ++
         from.map("from" -> _.toJson) ++

diff --git a/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala b/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala
index 4130e6e3b..f3029e130 100644
--- a/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala
+++ b/modules/library/src/test/scala/zio/elasticsearch/ElasticAggregationSpec.scala
@@ -1,8 +1,8 @@
 package zio.elasticsearch
 
-import zio.Chunk
 import zio.elasticsearch.ElasticAggregation._
 import zio.elasticsearch.ElasticQuery.term
+import zio.elasticsearch.aggregation.IpRange.IpRangeBound
 import zio.elasticsearch.aggregation._
 import zio.elasticsearch.domain.{TestDocument, TestSubDocument}
 import zio.elasticsearch.query.sort.SortOrder.{Asc, Desc}
@@ -11,6 +11,7 @@ import zio.elasticsearch.script.Script
 import zio.elasticsearch.utils._
 import zio.test.Assertion.equalTo
 import zio.test._
+import zio.{Chunk, NonEmptyChunk}
 
 object ElasticAggregationSpec extends ZIOSpecDefault {
   def spec: Spec[TestEnvironment, Any] =
@@ -155,9 +156,9 @@ object ElasticAggregationSpec extends ZIOSpecDefault {
         ipRangeAggregation(
           name = "ip_range_agg",
           field = "ipField",
-          ranges = Chunk(
-            IpRange.IpRangeBound(to = Some("10.0.0.5")),
-            IpRange.IpRangeBound(from = Some("10.0.0.5"))
+          ranges = NonEmptyChunk(
+            IpRangeBound(to = Some("10.0.0.5")),
+            IpRangeBound(from = Some("10.0.0.5"))
           )
         )
 
@@ -166,9 +167,9 @@ object ElasticAggregationSpec extends ZIOSpecDefault {
         IpRange(
           name = "ip_range_agg",
           field = "ipField",
-          ranges = Chunk(
-            IpRange.IpRangeBound(to = Some("10.0.0.5")),
-            IpRange.IpRangeBound(from = Some("10.0.0.5"))
+          ranges = NonEmptyChunk(
+            IpRangeBound(to = Some("10.0.0.5")),
+            IpRangeBound(from = Some("10.0.0.5"))
           ),
           keyed = None,
           subAggregations = None
@@ -993,19 +994,19 @@ object ElasticAggregationSpec extends ZIOSpecDefault {
       assert(aggregationWithSubAggregation.toJson)(equalTo(expectedWithSubAggregation.toJson)) &&
         assert(aggregationWithMultipleSubAggregations.toJson)(equalTo(expectedWithMultipleSubAggregations.toJson))
     },
-    test("ip_range aggregation with from/to ") {
-      val agg = IpRange(
+    test("ipRange") {
+      val aggFromTo = IpRange(
         name = "ip_range_agg",
         field = "ip",
-        ranges = Chunk(
-          IpRange.IpRangeBound(to = Some("10.0.0.5")),
-          IpRange.IpRangeBound(from = Some("10.0.0.5"))
+        ranges = NonEmptyChunk(
+          IpRangeBound(to = Some("10.0.0.5")),
+          IpRangeBound(from = Some("10.0.0.5"))
        ),
        keyed = None,
        subAggregations = None
      )
 
-      val expectedJson =
+      val expectedJsonFromTo =
        """
          |{
          |  "ip_range_agg": {
@@ -1024,21 +1025,18 @@ object ElasticAggregationSpec extends ZIOSpecDefault {
          |}
          |""".stripMargin
 
-      assert(agg.toJson)(equalTo(expectedJson.toJson))
-    },
-    test("ip_range aggregation with CIDR masks and keyed = true") {
-      val agg = IpRange(
+      val aggMaskKeyed = IpRange(
        name = "ip_range_agg",
        field = "ip",
        ranges = NonEmptyChunk(
+          IpRangeBound(mask = Some("10.0.0.0/25")),
+          IpRangeBound(mask = Some("10.0.0.127/25"))
        ),
        keyed = Some(true),
        subAggregations = None
      )
 
-      val expectedJson =
+      val expectedJsonMaskKeyed =
        """
          |{
          |  "ip_range_agg": {
@@ -1058,7 +1056,8 @@ object ElasticAggregationSpec extends ZIOSpecDefault {
          |}
          |""".stripMargin
 
-      assert(agg.toJson)(equalTo(expectedJson.toJson))
+      assert(aggFromTo.toJson)(equalTo(expectedJsonFromTo.toJson)) &&
+        assert(aggMaskKeyed.toJson)(equalTo(expectedJsonMaskKeyed.toJson))
    },
    test("max") {
      val aggregation = maxAggregation("aggregation", "testField")

From a1e18fffcf36de1895ea70a7ab65b8ce34420891 Mon Sep 17 00:00:00 2001
From: Marko Krstic
Date: Fri, 20 Jun 2025 13:03:09 +0200
Subject: [PATCH 15/15] Use NonEmptyChunk in the documentation examples.

---
 .../overview/aggregations/elastic_aggregation_ip_range.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/overview/aggregations/elastic_aggregation_ip_range.md b/docs/overview/aggregations/elastic_aggregation_ip_range.md
index f4088648b..d46554727 100644
--- a/docs/overview/aggregations/elastic_aggregation_ip_range.md
+++ b/docs/overview/aggregations/elastic_aggregation_ip_range.md
@@ -17,7 +17,7 @@ val aggregation: IpRangeAggregation =
   ipRangeAggregation(
     name = "ip_range_agg",
     field = Document.stringField,
-    ranges = Chunk(
+    ranges = NonEmptyChunk(
       IpRange.IpRangeBound(to = Some("10.0.0.5")),
       IpRange.IpRangeBound(from = Some("10.0.0.5"))
     )
@@ -30,7 +30,7 @@ val aggregation: IpRangeAggregation =
   ipRangeAggregation(
     name = "ip_range_agg",
     field = "ipField",
-    ranges = Chunk(
+    ranges = NonEmptyChunk(
       IpRange.IpRangeBound(to = Some("10.0.0.5")),
       IpRange.IpRangeBound(from = Some("10.0.0.5"))
     )
@@ -43,7 +43,7 @@ val cidrAggregation: IpRangeAggregation =
   ipRangeAggregation(
     name = "cidr_agg",
     field = "ipField",
-    ranges = Chunk(
+    ranges = NonEmptyChunk(
       IpRange.IpRangeBound(mask = Some("10.0.0.0/25")),
       IpRange.IpRangeBound(mask = Some("10.0.0.128/25"))
     )
@@ -53,7 +53,7 @@ If you want to explicitly set the keyed property:
 ```scala
 val multipleAggregations =
-  ipRangeAggregation("ip_range_agg", "ipField", Chunk(IpRange.IpRangeBound(to = Some("10.0.0.5"))))
+  ipRangeAggregation("ip_range_agg", "ipField", NonEmptyChunk(IpRange.IpRangeBound(to = Some("10.0.0.5"))))
     .keyedOn
     .withAgg(maxAggregation("maxAgg", "someField"))
 ```
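
For reference, a minimal end-to-end sketch of how the pieces introduced in this series compose: the `ipRangeAggregation` constructor, the `IpRangeBound` builders, `keyedOn`, and the `aggregations` accessor used in the integration tests above. The surrounding object, the `ipRangeBuckets` name, the index value, and the assumption that `ipField` is mapped as the Elasticsearch `ip` type are illustrative, not part of the patches.

```scala
import zio.NonEmptyChunk
import zio.elasticsearch._
import zio.elasticsearch.ElasticAggregation.ipRangeAggregation
import zio.elasticsearch.aggregation.IpRange.IpRangeBound
import zio.elasticsearch.executor.Executor

object IpRangeUsageSketch {

  // `index` is assumed to exist and to have `ipField` mapped as the `ip` type.
  def ipRangeBuckets(index: IndexName) = {
    val aggregation =
      ipRangeAggregation(
        name = "ip_range_agg",
        field = "ipField",
        ranges = NonEmptyChunk(
          IpRangeBound().to("10.0.0.5"),                  // addresses below 10.0.0.5
          IpRangeBound().from("10.0.0.5"),                // addresses from 10.0.0.5 upwards
          IpRangeBound().mask("10.0.0.0/25").key("lower") // a CIDR bucket with an explicit key
        )
      ).keyedOn // serialises as "keyed": true, so buckets come back keyed by range

    // Mirrors the integration tests: run the request and read the decoded aggregations.
    Executor
      .execute(ElasticRequest.aggregate(index, aggregation))
      .aggregations
  }
}
```

Note that mixing `from`/`to` bounds and `mask` bounds in one `ranges` chunk is allowed by the builder, since each `IpRangeBound` serialises only the fields that are set.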