5 changes: 3 additions & 2 deletions .dockerignore
@@ -2,10 +2,11 @@
.github/**
ci/**
docs/**
testdata/**
**/testdata/**
Makefile
README.md
LICENSE.txt
CHANGELOG.md
Dockerfile
docker-compose.yaml
docker-compose.yaml
*_test.go
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

- Add nested filtering support for nested flattened mappings ([#85](https://github.com/hasura/ndc-elasticsearch/pull/85))

## [1.8.0]

- Add basic query (no operator) support for unsupported object types ([#83](https://github.com/hasura/ndc-elasticsearch/pull/83))
3 changes: 3 additions & 0 deletions connector/connector.go
@@ -72,6 +72,9 @@ func (c *Connector) GetCapabilities(configuration *types.Configuration) schema.C
FilterBy: schema.LeafCapability{},
Aggregates: schema.LeafCapability{},
},
Exists: schema.ExistsCapabilities{
NestedCollections: schema.LeafCapability{},
},
},
},
}
67 changes: 61 additions & 6 deletions connector/filter.go
@@ -1,6 +1,7 @@
package connector

import (
"fmt"
"strings"

"github.com/hasura/ndc-elasticsearch/internal"
@@ -11,10 +12,16 @@ import (
// prepareFilterQuery prepares a filter query based on the given expression.
func prepareFilterQuery(expression schema.Expression, state *types.State, collection string) (map[string]interface{}, error) {
filter := make(map[string]interface{})
switch expr := expression.Interface().(type) {
columnPath, predicate := getPredicate(expression)

switch expr := predicate.Interface().(type) {
case *schema.ExpressionUnaryComparisonOperator:
fieldPath := strings.Split(columnPath, ".")
expr.Column.FieldPath = fieldPath
return handleExpressionUnaryComparisonOperator(expr, state, collection)
case *schema.ExpressionBinaryComparisonOperator:
fieldPath := strings.Split(columnPath, ".")
expr.Column.FieldPath = fieldPath
return handleExpressionBinaryComparisonOperator(expr, state, collection)
case *schema.ExpressionAnd:
queries := make([]map[string]interface{}, 0)
@@ -59,10 +66,52 @@ func prepareFilterQuery(expression schema.Expression, state *types.State, collec
}
}

// getPredicate checks whether a schema.Expression requires nested filtering.
// If it does, it recursively unwraps the nested exists expressions until it reaches a
// non-nested query predicate, returning the accumulated dotted column path and that predicate.
func getPredicate(expression schema.Expression) (string, schema.Expression) {
if nested, fieldName := requiresNestedFiltering(expression); nested {
expressionPredicate, ok := expression["predicate"].(schema.Expression)
if !ok {
return "", nil
}

columnPathPostfix, predicate := getPredicate(expressionPredicate)
return fmt.Sprintf("%s.%s", fieldName, columnPathPostfix), predicate
}
switch expr := expression.Interface().(type) {
case *schema.ExpressionUnaryComparisonOperator:
return expr.Column.Name, expression
case *schema.ExpressionBinaryComparisonOperator:
return expr.Column.Name, expression
}

return "", expression
}

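// requiresNestedFiltering reports whether the predicate is an exists expression over a
// nested collection and, if so, returns that collection's column name.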
func requiresNestedFiltering(predicate schema.Expression) (requiresNestedFiltering bool, nestedFieldName string) {
inCollection, ok := predicate["in_collection"].(schema.ExistsInCollection)
if !ok {
return false, ""
}
collection, err := inCollection.AsNestedCollection()
if err != nil {
return false, ""
}
if collection.Type == "nested_collection" {
return true, collection.ColumnName
}
return false, ""
}

// handleExpressionUnaryComparisonOperator processes the unary comparison operator expression.
func handleExpressionUnaryComparisonOperator(expr *schema.ExpressionUnaryComparisonOperator, state *types.State, collection string) (map[string]interface{}, error) {
if expr.Operator == "is_null" {
fieldName, _ := joinFieldPath(state, expr.Column.FieldPath, expr.Column.Name, collection)
if len(expr.Column.FieldPath) == 0 || expr.Column.FieldPath[len(expr.Column.FieldPath)-1] != expr.Column.Name {
// if the column name is not already the last element of FieldPath, append it so the field path is complete
expr.Column.FieldPath = append(expr.Column.FieldPath, expr.Column.Name)
}

fieldName := strings.Join(expr.Column.FieldPath, ".")
value := map[string]interface{}{
"field": fieldName,
}
@@ -85,7 +134,12 @@ func handleExpressionBinaryComparisonOperator(
state *types.State,
collection string,
) (map[string]interface{}, error) {
fieldPath, nestedPath := joinFieldPath(state, expr.Column.FieldPath, expr.Column.Name, collection)
if len(expr.Column.FieldPath) == 0 || expr.Column.FieldPath[len(expr.Column.FieldPath)-1] != expr.Column.Name {
// if the column name is not already the last element of FieldPath, append it so the field path is complete
expr.Column.FieldPath = append(expr.Column.FieldPath, expr.Column.Name)
}

fieldPath := strings.Join(expr.Column.FieldPath, ".")
fieldType, fieldSubTypes, _, err := state.Configuration.GetFieldProperties(collection, fieldPath)
if err != nil {
return nil, schema.UnprocessableContentError("unable to get field types", map[string]any{
@@ -110,9 +164,10 @@
expr.Operator: value,
}

if nestedPath != "" {
filter = prepareNestedQuery(state, expr.Operator, value, fieldPath, len(expr.Column.FieldPath), collection)
}
// TODO: re-enable
// if nestedPath != "" {
// filter = prepareNestedQuery(state, expr.Operator, value, fieldPath, len(expr.Column.FieldPath), collection)
// }

return filter, nil
}
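To make the data flow above easier to follow, here is a minimal sketch (not part of this PR) of what the new helpers do with the expression from testdata case 004 below: the two nested_collection exists levels are folded into a dotted column path, and the innermost comparison is handed back so prepareFilterQuery can split the path into Column.FieldPath for the existing unary/binary handlers. The test name TestGetPredicateSketch is hypothetical, and the snippet assumes it sits in a _test.go file inside the connector package, since getPredicate is unexported.

package connector

import (
	"encoding/json"
	"testing"

	"github.com/hasura/ndc-sdk-go/schema"
)

// TestGetPredicateSketch is a hypothetical, minimal walk-through of getPredicate
// using the same expression as testdata/filter_tests/get_predicate/004/got.json.
func TestGetPredicateSketch(t *testing.T) {
	raw := `{
	  "in_collection": {"column_name": "route", "type": "nested_collection"},
	  "predicate": {
	    "in_collection": {"column_name": "arrival_airport", "type": "nested_collection"},
	    "predicate": {
	      "column": {"type": "column", "name": "terminals"},
	      "operator": "match",
	      "type": "binary_comparison_operator",
	      "value": {"type": "scalar", "value": "2"}
	    },
	    "type": "exists"
	  },
	  "type": "exists"
	}`

	var expr schema.Expression
	if err := json.Unmarshal([]byte(raw), &expr); err != nil {
		t.Fatalf("unmarshalling expression: %v", err)
	}

	// Both exists levels collapse into a single dotted column path...
	path, predicate := getPredicate(expr)
	if path != "route.arrival_airport.terminals" {
		t.Fatalf("unexpected column path: %q", path)
	}

	// ...and the innermost comparison survives unchanged; prepareFilterQuery then
	// splits the path into Column.FieldPath, which the binary handler joins back
	// into the dotted field name used in the Elasticsearch filter.
	if _, ok := predicate.Interface().(*schema.ExpressionBinaryComparisonOperator); !ok {
		t.Fatalf("expected a binary comparison predicate, got %T", predicate.Interface())
	}
}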
124 changes: 124 additions & 0 deletions connector/filter_test.go
@@ -0,0 +1,124 @@
package connector

import (
"encoding/json"
"os"
"path/filepath"
"testing"

"github.com/hasura/ndc-sdk-go/schema"
"github.com/stretchr/testify/assert"
)

const filterTestsPath = "./testdata/filter_tests/"

func TestGetPredicate(t *testing.T) {
tests := []struct {
name string
gotExpression string
expectedColumnPath string
wantPredicate string
}{
{
name: "nested_001",
expectedColumnPath: "route.departure_airport.location.state",
},
{
name: "nested_and_002",
expectedColumnPath: "",
},
{
name: "003",
expectedColumnPath: "route.arrival_airport.location.coordinates.elevation",
},
{
name: "004",
expectedColumnPath: "route.arrival_airport.terminals",
},
{
name: "aggregations_005",
expectedColumnPath: "metric_value",
},
{
name: "006",
expectedColumnPath: "name",
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// setup test data
gotB, err := os.ReadFile(filepath.Join(filterTestsPath, "get_predicate", tt.name, "got.json"))
assert.NoError(t, err, "Error reading got.json file")
tt.gotExpression = string(gotB)

wantB, err := os.ReadFile(filepath.Join(filterTestsPath, "get_predicate", tt.name, "want.json"))
assert.NoError(t, err, "Error reading want.json file")
tt.wantPredicate = string(wantB)

// Convert tt.gotExpression from a JSON string to a schema.Expression
var expression schema.Expression
err = json.Unmarshal([]byte(tt.gotExpression), &expression)
assert.NoError(t, err, "Error unmarshalling expression JSON")

// Convert tt.wantPredicate from a JSON string to a schema.Expression
var expectedPredicate schema.Expression
err = json.Unmarshal([]byte(tt.wantPredicate), &expectedPredicate)
assert.NoError(t, err, "Error unmarshalling expectedPredicate JSON")

// Call getPredicate and validate results
path, result := getPredicate(expression)
assert.Equal(t, tt.expectedColumnPath, path)
assert.Equal(t, expectedPredicate, result)

// uncomment to regenerate the want file from the current result
// updatedWant, err := json.MarshalIndent(result, "", "  ")
// assert.NoError(t, err, "Error marshalling result")
// err = os.WriteFile(filepath.Join(filterTestsPath, "get_predicate", tt.name, "want.json"), updatedWant, 0644)
// assert.NoError(t, err, "Error writing want file")
})
}
}

func TestRequiresNestedFiltering(t *testing.T) {
tests := []struct {
name string
predicate schema.Expression
expectedNested bool
expectedNestedField string
}{
{
name: "Valid nested collection",
predicate: schema.Expression{
"in_collection": schema.ExistsInCollection{
"column_name": "my_nested_field",
"type": "nested_collection",
},
},
expectedNested: true,
expectedNestedField: "my_nested_field",
},
{
name: "Missing in_collection key",
predicate: schema.Expression{
"some_other_key": "some_value",
},
expectedNested: false,
expectedNestedField: "",
},
{
name: "Invalid in_collection type",
predicate: schema.Expression{
"in_collection": "invalid_type",
},
expectedNested: false,
expectedNestedField: "",
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
nested, fieldName := requiresNestedFiltering(tt.predicate)
assert.Equal(t, tt.expectedNested, nested)
assert.Equal(t, tt.expectedNestedField, fieldName)
})
}
}
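These new unit tests can presumably be run on their own with the standard Go tooling (assuming the repository layout shown in the file paths above), along the lines of:

go test ./connector -run 'TestGetPredicate|TestRequiresNestedFiltering' -v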
16 changes: 16 additions & 0 deletions connector/query_test.go
@@ -141,6 +141,22 @@ var tests = []test{
group: "payments",
name: "search_after_multiple_sorts",
},
{
group: "flights_nested_flattened",
name: "nested_filtering",
},
{
group: "flights_nested_flattened",
name: "nested_filtering_and",
},
{
group: "flights_nested_flattened",
name: "nested_filtering_range",
},
{
group: "flights_nested_flattened",
name: "nested_filtering_or",
},
}

func TestPrepareElasticsearchQuery(t *testing.T) {
46 changes: 46 additions & 0 deletions connector/testdata/filter_tests/get_predicate/003/got.json
@@ -0,0 +1,46 @@
{
"in_collection": {
"column_name": "route",
"type": "nested_collection"
},
"predicate": {
"in_collection": {
"column_name": "arrival_airport",
"type": "nested_collection"
},
"predicate": {
"in_collection": {
"column_name": "location",
"type": "nested_collection"
},
"predicate": {
"in_collection": {
"column_name": "coordinates",
"type": "nested_collection"
},
"predicate": {
"column": {
"type": "column",
"name": "elevation"
},
"operator": "range",
"type": "binary_comparison_operator",
"value": {
"type": "scalar",
"value": {
"boost": "",
"gt": "",
"gte": "200",
"lt": "",
"lte": ""
}
}
},
"type": "exists"
},
"type": "exists"
},
"type": "exists"
},
"type": "exists"
}
18 changes: 18 additions & 0 deletions connector/testdata/filter_tests/get_predicate/003/want.json
@@ -0,0 +1,18 @@
{
"column": {
"type": "column",
"name": "elevation"
},
"operator": "range",
"type": "binary_comparison_operator",
"value": {
"type": "scalar",
"value": {
"boost": "",
"gt": "",
"gte": "200",
"lt": "",
"lte": ""
}
}
}
26 changes: 26 additions & 0 deletions connector/testdata/filter_tests/get_predicate/004/got.json
@@ -0,0 +1,26 @@
{
"in_collection": {
"column_name": "route",
"type": "nested_collection"
},
"predicate": {
"in_collection": {
"column_name": "arrival_airport",
"type": "nested_collection"
},
"predicate": {
"column": {
"type": "column",
"name": "terminals"
},
"operator": "match",
"type": "binary_comparison_operator",
"value": {
"type": "scalar",
"value": "2"
}
},
"type": "exists"
},
"type": "exists"
}