- public Set<ConfigOption<?>> optionalOptions() {
- return new JsonFormatFactory().optionalOptions();
- }
-}
diff --git a/sqrl-flexible-json/src/main/java/com/datasqrl/format/SqrlJsonRowDataSerializationSchema.java b/sqrl-flexible-json/src/main/java/com/datasqrl/format/SqrlJsonRowDataSerializationSchema.java
deleted file mode 100644
index a34c044..0000000
--- a/sqrl-flexible-json/src/main/java/com/datasqrl/format/SqrlJsonRowDataSerializationSchema.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.format; /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.Objects;
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.api.common.serialization.SerializationSchema;
-import org.apache.flink.formats.common.TimestampFormat;
-import org.apache.flink.formats.json.JsonFormatOptions;
-import org.apache.flink.formats.json.JsonRowDataDeserializationSchema;
-import org.apache.flink.formats.json.RowDataToJsonConverters;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/**
- * Serialization schema that serializes an object of Flink internal data structure into JSON
- * bytes.
- *
- * <p>Serializes the input Flink object into a JSON string and converts it into byte[].
- *
- * <p>Result byte[] messages can be deserialized using {@link
- * JsonRowDataDeserializationSchema}.
- *
- * <p>== CHANGES MADE BY SQRL == - overwrites runtimeConverter with {@link
- * SqrlRowDataToJsonConverters} to add support for SQRL types
- */
-@Internal
-public class SqrlJsonRowDataSerializationSchema implements SerializationSchema<RowData> {
- private static final long serialVersionUID = 1L;
-
- /** RowType to generate the runtime converter. */
- private final RowType rowType;
-
- /** The converter that converts internal data formats to JsonNode. */
- private final RowDataToJsonConverters.RowDataToJsonConverter runtimeConverter;
-
- /** Object mapper that is used to create output JSON objects. */
- private transient ObjectMapper mapper;
-
- /** Reusable object node. */
- private transient ObjectNode node;
-
- /** Timestamp format specification which is used to parse timestamp. */
- private final TimestampFormat timestampFormat;
-
- /** The handling mode when serializing null keys for map data. */
- private final JsonFormatOptions.MapNullKeyMode mapNullKeyMode;
-
- /** The string literal when handling mode for map null key LITERAL. */
- private final String mapNullKeyLiteral;
-
- /** Flag indicating whether to serialize all decimals as plain numbers. */
- private final boolean encodeDecimalAsPlainNumber;
-
- public SqrlJsonRowDataSerializationSchema(
- RowType rowType,
- TimestampFormat timestampFormat,
- JsonFormatOptions.MapNullKeyMode mapNullKeyMode,
- String mapNullKeyLiteral,
- boolean encodeDecimalAsPlainNumber) {
- this.rowType = rowType;
- this.timestampFormat = timestampFormat;
- this.mapNullKeyMode = mapNullKeyMode;
- this.mapNullKeyLiteral = mapNullKeyLiteral;
- this.encodeDecimalAsPlainNumber = encodeDecimalAsPlainNumber;
- this.runtimeConverter =
- new SqrlRowDataToJsonConverters(timestampFormat, mapNullKeyMode, mapNullKeyLiteral)
- .createConverter(rowType);
- }
-
- @Override
- public void open(InitializationContext context) throws Exception {
- mapper =
- JacksonMapperFactory.createObjectMapper()
- .configure(JsonGenerator.Feature.WRITE_BIGDECIMAL_AS_PLAIN, encodeDecimalAsPlainNumber);
- }
-
- @Override
- public byte[] serialize(RowData row) {
- if (node == null) {
- node = mapper.createObjectNode();
- }
-
- try {
- runtimeConverter.convert(mapper, node, row);
- return mapper.writeValueAsBytes(node);
- } catch (Throwable t) {
- throw new RuntimeException(String.format("Could not serialize row '%s'.", row), t);
- }
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- SqrlJsonRowDataSerializationSchema that = (SqrlJsonRowDataSerializationSchema) o;
- return rowType.equals(that.rowType)
- && timestampFormat.equals(that.timestampFormat)
- && mapNullKeyMode.equals(that.mapNullKeyMode)
- && mapNullKeyLiteral.equals(that.mapNullKeyLiteral)
- && encodeDecimalAsPlainNumber == that.encodeDecimalAsPlainNumber;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(
- rowType, timestampFormat, mapNullKeyMode, mapNullKeyLiteral, encodeDecimalAsPlainNumber);
- }
-}
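
For reference, the deleted schema was self-contained and could be exercised directly; a minimal sketch of that usage (constructor arguments as declared above; the sample data and the class name `SerializationSketch` are illustrative):

```java
import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonFormatOptions;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class SerializationSketch {
  public static void main(String[] args) throws Exception {
    // Row schema with two string fields (field names default to f0, f1).
    RowType rowType = RowType.of(new VarCharType(), new VarCharType());

    SqrlJsonRowDataSerializationSchema schema =
        new SqrlJsonRowDataSerializationSchema(
            rowType,
            TimestampFormat.ISO_8601,
            JsonFormatOptions.MapNullKeyMode.FAIL,
            "null", // literal used only when MapNullKeyMode.LITERAL is selected
            false); // keep scientific notation for decimals

    schema.open(null); // open() only builds the ObjectMapper, so no real context is needed here
    byte[] json = schema.serialize(
        GenericRowData.of(StringData.fromString("a"), StringData.fromString("b")));
    System.out.println(new String(json)); // {"f0":"a","f1":"b"}
  }
}
```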
diff --git a/sqrl-flexible-json/src/main/java/com/datasqrl/format/SqrlRowDataToJsonConverters.java b/sqrl-flexible-json/src/main/java/com/datasqrl/format/SqrlRowDataToJsonConverters.java
deleted file mode 100644
index cba141c..0000000
--- a/sqrl-flexible-json/src/main/java/com/datasqrl/format/SqrlRowDataToJsonConverters.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.format;
-
-import com.datasqrl.json.FlinkJsonType;
-import org.apache.flink.formats.common.TimestampFormat;
-import org.apache.flink.formats.json.JsonFormatOptions.MapNullKeyMode;
-import org.apache.flink.formats.json.RowDataToJsonConverters;
-import org.apache.flink.table.data.binary.BinaryRawValueData;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RawType;
-
-/**
- * Extends the {@link RowDataToJsonConverters} to add support for FlinkJsonType by serializing it as
- * json and not string
- */
-public class SqrlRowDataToJsonConverters extends RowDataToJsonConverters {
-
- public SqrlRowDataToJsonConverters(
- TimestampFormat timestampFormat, MapNullKeyMode mapNullKeyMode, String mapNullKeyLiteral) {
- super(timestampFormat, mapNullKeyMode, mapNullKeyLiteral);
- }
-
- @Override
- public RowDataToJsonConverter createConverter(LogicalType type) {
-
- switch (type.getTypeRoot()) {
- case RAW:
- // sqrl add raw type
- RawType rawType = (RawType) type;
- if (rawType.getOriginatingClass() == FlinkJsonType.class) {
- return createJsonConverter((RawType) type);
- }
- }
- return super.createConverter(type);
- }
-
- private RowDataToJsonConverter createJsonConverter(RawType type) {
- return (mapper, reuse, value) -> {
- if (value == null) {
- return null;
- }
- BinaryRawValueData binaryRawValueData = (BinaryRawValueData) value;
- FlinkJsonType o = (FlinkJsonType) binaryRawValueData.toObject(type.getTypeSerializer());
- if (o == null) {
- return null;
- }
- return o.getJson();
- };
- }
-}
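
The override above only intercepts RAW fields whose originating class is FlinkJsonType; everything else falls through to the stock Flink converters. A hedged sketch of how a converter for such a row type was obtained (the helper name and row type are assumptions):

```java
import org.apache.flink.formats.common.TimestampFormat;
import org.apache.flink.formats.json.JsonFormatOptions.MapNullKeyMode;
import org.apache.flink.formats.json.RowDataToJsonConverters;
import org.apache.flink.table.types.logical.RowType;

public class ConverterSketch {
  // Builds a converter for the given row type. A RAW(FlinkJsonType) field is spliced
  // into the output tree ({"payload":{"a":1}}) instead of escaped as a string
  // ({"payload":"{\"a\":1}"}), which is the point of the subclass above.
  static RowDataToJsonConverters.RowDataToJsonConverter buildConverter(RowType rowType) {
    return new SqrlRowDataToJsonConverters(TimestampFormat.ISO_8601, MapNullKeyMode.FAIL, "null")
        .createConverter(rowType);
  }
}
```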
diff --git a/sqrl-flexible-json/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/sqrl-flexible-json/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
deleted file mode 100644
index 3f24ffc..0000000
--- a/sqrl-flexible-json/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.format.FlexibleJsonFormat
\ No newline at end of file
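
That one-line service file was the actual registration mechanism: Flink discovers table factories through plain `java.util.ServiceLoader`. A minimal sketch of the lookup:

```java
import java.util.ServiceLoader;
import org.apache.flink.table.factories.Factory;

public class DiscoverySketch {
  public static void main(String[] args) {
    // Flink scans META-INF/services/org.apache.flink.table.factories.Factory entries
    // like the one deleted above and matches candidates on factoryIdentifier().
    for (Factory factory : ServiceLoader.load(Factory.class)) {
      System.out.println(factory.factoryIdentifier());
    }
  }
}
```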
diff --git a/sqrl-h2-1.16/pom.xml b/sqrl-h2-1.16/pom.xml
deleted file mode 100644
index a8e05a9..0000000
--- a/sqrl-h2-1.16/pom.xml
+++ /dev/null
@@ -1,73 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>com.datasqrl.flink</groupId>
- <artifactId>sqrl-flink-parent</artifactId>
- <version>0.1-SNAPSHOT</version>
- </parent>
-
- <artifactId>sqrl-h2-1.16</artifactId>
- <name>H2 Sink</name>
-
- <url>https://www.datasqrl.com/</url>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <developers>
- <developer>
- <name>Daniel Henneberger</name>
- <email>daniel@datasqrl.com</email>
- <url>https://github.com/henneberger</url>
- </developer>
- </developers>
-
- <scm>
- <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
- <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
- <tag>HEAD</tag>
- <url>https://github.com/DataSQRL/sqrl</url>
- </scm>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-connector-jdbc</artifactId>
- <version>3.2.0-1.19</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.h2database</groupId>
- <artifactId>h2</artifactId>
- <version>2.3.232</version>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-common</artifactId>
- <version>1.19.0</version>
- <scope>provided</scope>
- </dependency>
- </dependencies>
-</project>
diff --git a/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2Dialect.java b/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2Dialect.java
deleted file mode 100644
index b6de027..0000000
--- a/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2Dialect.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.flink.connector.jdbc.dialect.h2;
-
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.flink.connector.jdbc.converter.JdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-public class H2Dialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- @Override
- public JdbcRowConverter getRowConverter(RowType rowType) {
- return new H2RowConverter(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional<String> defaultDriverName() {
- return Optional.of("org.h2.Driver");
- }
-
- /**
- * MERGE INTO tableName [ ( columnName [,...] ) ] [ KEY ( columnName [,...] ) ] { VALUES { ( {
- * DEFAULT | expression } [,...] ) } [,...] | select }
- */
- @Override
- public Optional<String> getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- String uniqueColumns =
- Arrays.stream(uniqueKeyFields).map(this::quoteIdentifier).collect(Collectors.joining(", "));
-
- String columns =
- Arrays.stream(fieldNames).map(this::quoteIdentifier).collect(Collectors.joining(", "));
- String placeholders =
- Arrays.stream(fieldNames).map((f) -> ":" + f).collect(Collectors.joining(", "));
-
- return Optional.of(
- "MERGE INTO "
- + this.quoteIdentifier(tableName)
- + "("
- + columns
- + ")"
- + " KEY ("
- + uniqueColumns
- + ") VALUES ("
- + placeholders
- + ")");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "\"" + identifier + "\"";
- }
-
- @Override
- public String dialectName() {
- return "H2";
- }
-
- @Override
- public Optional<Range> decimalPrecisionRange() {
- return Optional.of(Range.of(0, 100000));
- }
-
- @Override
- public Optional<Range> timestampPrecisionRange() {
- return Optional.of(Range.of(0, 9));
- }
-
- @Override
- public Set<LogicalTypeRoot> supportedTypes() {
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
- LogicalTypeRoot.ARRAY);
- }
-}
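
For reference, the MERGE-based upsert this dialect generates, shown for a hypothetical table with columns `id`, `val` and key `id`:

```java
public class H2UpsertSketch {
  public static void main(String[] args) {
    H2Dialect dialect = new H2Dialect();
    String upsert = dialect
        .getUpsertStatement("MyTable", new String[] {"id", "val"}, new String[] {"id"})
        .get();
    System.out.println(upsert);
    // MERGE INTO "MyTable"("id", "val") KEY ("id") VALUES (:id, :val)
  }
}
```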
diff --git a/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2DialectFactory.java b/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2DialectFactory.java
deleted file mode 100644
index b8501be..0000000
--- a/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2DialectFactory.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.flink.connector.jdbc.dialect.h2;
-
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-public class H2DialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:h2:");
- }
-
- @Override
- public JdbcDialect create() {
- return new H2Dialect();
- }
-}
diff --git a/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2RowConverter.java b/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2RowConverter.java
deleted file mode 100644
index 8b8bf45..0000000
--- a/sqrl-h2-1.16/src/main/java/com/datasqrl/flink/connector/jdbc/dialect/h2/H2RowConverter.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.flink.connector.jdbc.dialect.h2;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.types.logical.RowType;
-
-public class H2RowConverter extends AbstractJdbcRowConverter {
-
- public H2RowConverter(RowType rowType) {
- super(rowType);
- }
-
- @Override
- public String converterName() {
- return "H2";
- }
-}
diff --git a/sqrl-h2-1.16/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory b/sqrl-h2-1.16/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory
deleted file mode 100644
index 36f3164..0000000
--- a/sqrl-h2-1.16/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.flink.connector.jdbc.dialect.h2.H2DialectFactory
diff --git a/sqrl-jdbc-1.16/README.md b/sqrl-jdbc-1.16/README.md
deleted file mode 100644
index adbcba7..0000000
--- a/sqrl-jdbc-1.16/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Notes: requires flink-csv and flink-json in provided scope
\ No newline at end of file
diff --git a/sqrl-jdbc-1.16/pom.xml b/sqrl-jdbc-1.16/pom.xml
deleted file mode 100644
index 30f24a5..0000000
--- a/sqrl-jdbc-1.16/pom.xml
+++ /dev/null
@@ -1,120 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>com.datasqrl.flink</groupId>
- <artifactId>sqrl-flink-parent</artifactId>
- <version>0.1-SNAPSHOT</version>
- </parent>
-
- <artifactId>sqrl-jdbc-1.16</artifactId>
-
- <name>Jdbc sink for flink 1.16</name>
-
- <url>https://www.datasqrl.com/</url>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <developers>
- <developer>
- <name>Daniel Henneberger</name>
- <email>daniel@datasqrl.com</email>
- <url>https://github.com/henneberger</url>
- </developer>
- </developers>
-
- <scm>
- <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
- <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
- <tag>HEAD</tag>
- <url>https://github.com/DataSQRL/sqrl</url>
- </scm>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-connector-jdbc</artifactId>
- <version>1.16.1</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.postgresql</groupId>
- <artifactId>postgresql</artifactId>
- <version>${postgres.version}</version>
- </dependency>
- <dependency>
- <groupId>org.testcontainers</groupId>
- <artifactId>postgresql</artifactId>
- <version>${testcontainers.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-runtime</artifactId>
- <version>1.16.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-common</artifactId>
- <version>1.16.1</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-lib-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-flexible-json</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-csv</artifactId>
- <version>1.16.1</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-json</artifactId>
- <version>1.16.1</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-planner_2.12</artifactId>
- <version>1.16.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils</artifactId>
- <version>1.16.2</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
deleted file mode 100644
index 6c76f5e..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static com.datasqrl.type.FlinkArrayTypeUtil.getBaseFlinkArrayType;
-import static com.datasqrl.type.FlinkArrayTypeUtil.isScalarArray;
-import static com.datasqrl.type.PostgresArrayTypeConverter.getArrayScalarName;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.MAP;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.ROW;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
-
-import java.sql.Array;
-import java.sql.PreparedStatement;
-import java.sql.Timestamp;
-import java.sql.Types;
-import java.time.LocalDateTime;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.ArrayData;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-/** A sqrl class to handle arrays and extra data types */
-public abstract class SqrlBaseJdbcRowConverter extends AbstractJdbcRowConverter {
-
- public SqrlBaseJdbcRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (type.getTypeRoot() == TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- int timestampWithTimezone = Types.TIMESTAMP_WITH_TIMEZONE;
- return (val, index, statement) -> {
- if (val == null || val.isNullAt(index) || LogicalTypeRoot.NULL.equals(type.getTypeRoot())) {
- statement.setNull(index, timestampWithTimezone);
- } else {
- jdbcSerializationConverter.serialize(val, index, statement);
- }
- };
- } else if (type.getTypeRoot() == ROW) {
- return (val, index, statement) -> setRow(type, val, index, statement);
- } else if (type.getTypeRoot() == MAP) {
- return (val, index, statement) -> setRow(type, val, index, statement);
- }
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- LogicalTypeRoot root = type.getTypeRoot();
-
- if (root == LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- return val ->
- val instanceof LocalDateTime
- ? TimestampData.fromLocalDateTime((LocalDateTime) val)
- : TimestampData.fromTimestamp((Timestamp) val);
- } else if (root == LogicalTypeRoot.ARRAY) {
- ArrayType arrayType = (ArrayType) type;
- return createArrayConverter(arrayType);
- } else if (root == LogicalTypeRoot.ROW) {
- return val -> val;
- } else if (root == LogicalTypeRoot.MAP) {
- return val -> val;
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- switch (type.getTypeRoot()) {
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- final int tsPrecision = ((LocalZonedTimestampType) type).getPrecision();
- return (val, index, statement) ->
- statement.setTimestamp(index, val.getTimestamp(index, tsPrecision).toTimestamp());
- case ARRAY:
- return (val, index, statement) -> setArray(type, val, index, statement);
- case ROW:
- return (val, index, statement) -> setRow(type, val, index, statement);
- case MAP:
- return (val, index, statement) -> setRow(type, val, index, statement);
- case MULTISET:
- case RAW:
- default:
- return super.createExternalConverter(type);
- }
- }
-
- public abstract void setRow(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement);
-
- @SneakyThrows
- public void setArray(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement) {
- SqrlFieldNamedPreparedStatementImpl flinkPreparedStatement =
- (SqrlFieldNamedPreparedStatementImpl) statement;
- for (int idx : flinkPreparedStatement.getIndexMapping()[index]) {
- ArrayData arrayData = val.getArray(index);
- createSqlArrayObject(type, arrayData, idx, flinkPreparedStatement.getStatement());
- }
- }
-
- @SneakyThrows
- private void createSqlArrayObject(
- LogicalType type, ArrayData data, int idx, PreparedStatement statement) {
- // Scalar arrays of any dimension are one array call
- if (isScalarArray(type)) {
- Object[] boxed;
- if (data instanceof GenericArrayData) {
- boxed = ((GenericArrayData) data).toObjectArray();
- } else if (data instanceof BinaryArrayData) {
- boxed = ((BinaryArrayData) data).toObjectArray(getBaseFlinkArrayType(type));
- } else {
- throw new RuntimeException("Unsupported ArrayData type: " + data.getClass());
- }
- Array array = statement.getConnection().createArrayOf(getArrayScalarName(type), boxed);
- statement.setArray(idx, array);
- } else {
- // If it is not a scalar array (e.g. row type), use an empty byte array.
- Array array = statement.getConnection().createArrayOf(getArrayType(), new Byte[0]);
- statement.setArray(idx, array);
- }
- }
-
- protected abstract String getArrayType();
-
- public abstract JdbcDeserializationConverter createArrayConverter(ArrayType arrayType);
-}
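
The scalar branch of `createSqlArrayObject` reduces to a single `Connection.createArrayOf` call with the PostgreSQL element type name; a stand-alone sketch of that path (sample data and the method name are illustrative):

```java
import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class ArrayBindSketch {
  // Mirrors createSqlArrayObject for the scalar case: one createArrayOf call,
  // regardless of whether the ArrayData was generic or binary-backed.
  static void bindBigintArray(Connection connection, PreparedStatement statement, int idx)
      throws SQLException {
    Object[] boxed = new Long[] {1L, 2L, 3L}; // real code boxes the ArrayData elements
    Array array = connection.createArrayOf("bigint", boxed);
    statement.setArray(idx, array);
  }
}
```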
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java
deleted file mode 100644
index ea13e8e..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.util.Preconditions.checkArgument;
-import static org.apache.flink.util.Preconditions.checkNotNull;
-
-import java.math.BigDecimal;
-import java.sql.Connection;
-import java.sql.Date;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-
-/** SQRL: added getStatement */
-
-/** Simple implementation of {@link FieldNamedPreparedStatement}. */
-public class SqrlFieldNamedPreparedStatementImpl implements FieldNamedPreparedStatement {
-
- private final PreparedStatement statement;
- private final int[][] indexMapping;
-
- private SqrlFieldNamedPreparedStatementImpl(PreparedStatement statement, int[][] indexMapping) {
- this.statement = statement;
- this.indexMapping = indexMapping;
- }
-
- public PreparedStatement getStatement() {
- return statement;
- }
-
- public int[][] getIndexMapping() {
- return indexMapping;
- }
-
- @Override
- public void clearParameters() throws SQLException {
- statement.clearParameters();
- }
-
- @Override
- public ResultSet executeQuery() throws SQLException {
- return statement.executeQuery();
- }
-
- @Override
- public void addBatch() throws SQLException {
- statement.addBatch();
- }
-
- @Override
- public int[] executeBatch() throws SQLException {
- return statement.executeBatch();
- }
-
- @Override
- public void setNull(int fieldIndex, int sqlType) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setNull(index, sqlType);
- }
- }
-
- @Override
- public void setBoolean(int fieldIndex, boolean x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBoolean(index, x);
- }
- }
-
- @Override
- public void setByte(int fieldIndex, byte x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setByte(index, x);
- }
- }
-
- @Override
- public void setShort(int fieldIndex, short x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setShort(index, x);
- }
- }
-
- @Override
- public void setInt(int fieldIndex, int x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setInt(index, x);
- }
- }
-
- @Override
- public void setLong(int fieldIndex, long x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setLong(index, x);
- }
- }
-
- @Override
- public void setFloat(int fieldIndex, float x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setFloat(index, x);
- }
- }
-
- @Override
- public void setDouble(int fieldIndex, double x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setDouble(index, x);
- }
- }
-
- @Override
- public void setBigDecimal(int fieldIndex, BigDecimal x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBigDecimal(index, x);
- }
- }
-
- @Override
- public void setString(int fieldIndex, String x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setString(index, x);
- }
- }
-
- @Override
- public void setBytes(int fieldIndex, byte[] x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBytes(index, x);
- }
- }
-
- @Override
- public void setDate(int fieldIndex, Date x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setDate(index, x);
- }
- }
-
- @Override
- public void setTime(int fieldIndex, Time x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setTime(index, x);
- }
- }
-
- @Override
- public void setTimestamp(int fieldIndex, Timestamp x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setTimestamp(index, x);
- }
- }
-
- @Override
- public void setObject(int fieldIndex, Object x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setObject(index, x);
- }
- }
-
- @Override
- public void close() throws SQLException {
- statement.close();
- }
-
- // ----------------------------------------------------------------------------------------
-
- public static FieldNamedPreparedStatement prepareStatement(
- Connection connection, String sql, String[] fieldNames) throws SQLException {
- checkNotNull(connection, "connection must not be null.");
- checkNotNull(sql, "sql must not be null.");
- checkNotNull(fieldNames, "fieldNames must not be null.");
-
- if (sql.contains("?")) {
- throw new IllegalArgumentException("SQL statement must not contain ? character.");
- }
-
- HashMap<String, List<Integer>> parameterMap = new HashMap<>();
- String parsedSQL = parseNamedStatement(sql, parameterMap);
- // currently, the statements must contain all the field parameters
- checkArgument(parameterMap.size() == fieldNames.length);
- int[][] indexMapping = new int[fieldNames.length][];
- for (int i = 0; i < fieldNames.length; i++) {
- String fieldName = fieldNames[i];
- checkArgument(
- parameterMap.containsKey(fieldName),
- fieldName + " doesn't exist in the parameters of SQL statement: " + sql);
- indexMapping[i] = parameterMap.get(fieldName).stream().mapToInt(v -> v).toArray();
- }
-
- return new SqrlFieldNamedPreparedStatementImpl(
- connection.prepareStatement(parsedSQL), indexMapping);
- }
-
- /**
- * Parses a sql with named parameters. The parameter-index mappings are put into the map, and the
- * parsed sql is returned.
- *
- * @param sql sql to parse
- * @param paramMap map to hold parameter-index mappings
- * @return the parsed sql
- */
- public static String parseNamedStatement(String sql, Map<String, List<Integer>> paramMap) {
- StringBuilder parsedSql = new StringBuilder();
- int fieldIndex = 1; // SQL statement parameter index starts from 1
- int length = sql.length();
- for (int i = 0; i < length; i++) {
- char c = sql.charAt(i);
- if (':' == c) {
- int j = i + 1;
- while (j < length && Character.isJavaIdentifierPart(sql.charAt(j))) {
- j++;
- }
- String parameterName = sql.substring(i + 1, j);
- checkArgument(
- !parameterName.isEmpty(), "Named parameters in SQL statement must not be empty.");
- paramMap.computeIfAbsent(parameterName, n -> new ArrayList<>()).add(fieldIndex);
- fieldIndex++;
- i = j - 1;
- parsedSql.append('?');
- } else {
- parsedSql.append(c);
- }
- }
- return parsedSql.toString();
- }
-}
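
`parseNamedStatement` rewrites each `:name` placeholder to `?` while recording its 1-based parameter positions; that is where the `indexMapping` used throughout this class comes from. A short usage sketch:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ParseSketch {
  public static void main(String[] args) {
    Map<String, List<Integer>> params = new HashMap<>();
    String parsed = SqrlFieldNamedPreparedStatementImpl.parseNamedStatement(
        "INSERT INTO t (a, b) VALUES (:a, :b)", params);
    System.out.println(parsed); // INSERT INTO t (a, b) VALUES (?, ?)
    System.out.println(params); // {a=[1], b=[2]}
  }
}
```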
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
deleted file mode 100644
index 91e8097..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.DRIVER;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MISSING_KEY;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_TTL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.MAX_RETRY_TIMEOUT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.PASSWORD;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_AUTO_COMMIT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_FETCH_SIZE;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_COLUMN;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_LOWER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_NUM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_UPPER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_PARALLELISM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.TABLE_NAME;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.URL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.USERNAME;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.configuration.ConfigOption;
-import org.apache.flink.configuration.ConfigOptions;
-import org.apache.flink.configuration.ReadableConfig;
-import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectLoader;
-import org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions;
-import org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSink;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSource;
-import org.apache.flink.table.connector.sink.DynamicTableSink;
-import org.apache.flink.table.connector.source.lookup.LookupOptions;
-import org.apache.flink.table.factories.DynamicTableSinkFactory;
-import org.apache.flink.table.factories.FactoryUtil;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.util.Preconditions;
-
-/**
- * Factory for creating configured instances of {@link JdbcDynamicTableSource} and {@link
- * JdbcDynamicTableSink}.
- */
-@Internal
-public class SqrlJdbcDynamicTableFactory implements DynamicTableSinkFactory {
-
- public static final String IDENTIFIER = "jdbc-sqrl";
-
- @Override
- public DynamicTableSink createDynamicTableSink(Context context) {
- final FactoryUtil.TableFactoryHelper helper =
- FactoryUtil.createTableFactoryHelper(this, context);
- final ReadableConfig config = helper.getOptions();
-
- helper.validate();
- validateConfigOptions(config, context.getClassLoader());
- validateDataTypeWithJdbcDialect(
- context.getPhysicalRowDataType(), config.get(URL), context.getClassLoader());
- JdbcConnectorOptions jdbcOptions = getJdbcOptions(config, context.getClassLoader());
-
- return new JdbcDynamicTableSink(
- jdbcOptions,
- getJdbcExecutionOptions(config),
- getJdbcDmlOptions(
- jdbcOptions, context.getPhysicalRowDataType(), context.getPrimaryKeyIndexes()),
- context.getPhysicalRowDataType());
- }
-
- private static void validateDataTypeWithJdbcDialect(
- DataType dataType, String url, ClassLoader classLoader) {
- JdbcDialect dialect = loadDialect(url, classLoader);
-
- dialect.validate((RowType) dataType.getLogicalType());
- }
-
- private JdbcConnectorOptions getJdbcOptions(
- ReadableConfig readableConfig, ClassLoader classLoader) {
- final String url = readableConfig.get(URL);
- final JdbcConnectorOptions.Builder builder =
- JdbcConnectorOptions.builder()
- .setClassLoader(classLoader)
- .setDBUrl(url)
- .setTableName(readableConfig.get(TABLE_NAME))
- .setDialect(loadDialect(url, classLoader))
- .setParallelism(readableConfig.getOptional(SINK_PARALLELISM).orElse(null))
- .setConnectionCheckTimeoutSeconds(
- (int) readableConfig.get(MAX_RETRY_TIMEOUT).getSeconds());
-
- readableConfig.getOptional(DRIVER).ifPresent(builder::setDriverName);
- readableConfig.getOptional(USERNAME).ifPresent(builder::setUsername);
- readableConfig.getOptional(PASSWORD).ifPresent(builder::setPassword);
- return builder.build();
- }
-
- private static JdbcDialect loadDialect(String url, ClassLoader classLoader) {
- JdbcDialect dialect = JdbcDialectLoader.load(url, classLoader);
- // sqrl: standard postgres dialect with extended dialect
- if (dialect.dialectName().equalsIgnoreCase("PostgreSQL")) {
- return new SqrlPostgresDialect();
- }
- return dialect;
- }
-
- private JdbcExecutionOptions getJdbcExecutionOptions(ReadableConfig config) {
- final JdbcExecutionOptions.Builder builder = new JdbcExecutionOptions.Builder();
- builder.withBatchSize(config.get(SINK_BUFFER_FLUSH_MAX_ROWS));
- builder.withBatchIntervalMs(config.get(SINK_BUFFER_FLUSH_INTERVAL).toMillis());
- builder.withMaxRetries(config.get(SINK_MAX_RETRIES));
- return builder.build();
- }
-
- private JdbcDmlOptions getJdbcDmlOptions(
- JdbcConnectorOptions jdbcOptions, DataType dataType, int[] primaryKeyIndexes) {
-
- String[] keyFields =
- Arrays.stream(primaryKeyIndexes)
- .mapToObj(i -> DataType.getFieldNames(dataType).get(i))
- .toArray(String[]::new);
-
- return JdbcDmlOptions.builder()
- .withTableName(jdbcOptions.getTableName())
- .withDialect(jdbcOptions.getDialect())
- .withFieldNames(DataType.getFieldNames(dataType).toArray(new String[0]))
- .withKeyFields(keyFields.length > 0 ? keyFields : null)
- .build();
- }
-
- @Override
- public String factoryIdentifier() {
- return IDENTIFIER;
- }
-
- @Override
- public Set<ConfigOption<?>> requiredOptions() {
- Set> requiredOptions = new HashSet<>();
- requiredOptions.add(URL);
- requiredOptions.add(TABLE_NAME);
- return requiredOptions;
- }
-
- @Override
- public Set<ConfigOption<?>> optionalOptions() {
- Set> optionalOptions = new HashSet<>();
- optionalOptions.add(DRIVER);
- optionalOptions.add(USERNAME);
- optionalOptions.add(PASSWORD);
- optionalOptions.add(SCAN_PARTITION_COLUMN);
- optionalOptions.add(SCAN_PARTITION_LOWER_BOUND);
- optionalOptions.add(SCAN_PARTITION_UPPER_BOUND);
- optionalOptions.add(SCAN_PARTITION_NUM);
- optionalOptions.add(SCAN_FETCH_SIZE);
- optionalOptions.add(SCAN_AUTO_COMMIT);
- optionalOptions.add(LOOKUP_CACHE_MAX_ROWS);
- optionalOptions.add(LOOKUP_CACHE_TTL);
- optionalOptions.add(LOOKUP_MAX_RETRIES);
- optionalOptions.add(LOOKUP_CACHE_MISSING_KEY);
- optionalOptions.add(SINK_BUFFER_FLUSH_MAX_ROWS);
- optionalOptions.add(SINK_BUFFER_FLUSH_INTERVAL);
- optionalOptions.add(SINK_MAX_RETRIES);
- optionalOptions.add(SINK_PARALLELISM);
- optionalOptions.add(MAX_RETRY_TIMEOUT);
- optionalOptions.add(LookupOptions.CACHE_TYPE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_ACCESS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_WRITE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_MAX_ROWS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_CACHE_MISSING_KEY);
- optionalOptions.add(LookupOptions.MAX_RETRIES);
- return optionalOptions;
- }
-
- @Override
- public Set<ConfigOption<?>> forwardOptions() {
- return Stream.of(
- URL,
- TABLE_NAME,
- USERNAME,
- PASSWORD,
- DRIVER,
- SINK_BUFFER_FLUSH_MAX_ROWS,
- SINK_BUFFER_FLUSH_INTERVAL,
- SINK_MAX_RETRIES,
- MAX_RETRY_TIMEOUT,
- SCAN_FETCH_SIZE,
- SCAN_AUTO_COMMIT)
- .collect(Collectors.toSet());
- }
-
- private void validateConfigOptions(ReadableConfig config, ClassLoader classLoader) {
- String jdbcUrl = config.get(URL);
- JdbcDialectLoader.load(jdbcUrl, classLoader);
-
- checkAllOrNone(config, new ConfigOption[] {USERNAME, PASSWORD});
-
- checkAllOrNone(
- config,
- new ConfigOption[] {
- SCAN_PARTITION_COLUMN,
- SCAN_PARTITION_NUM,
- SCAN_PARTITION_LOWER_BOUND,
- SCAN_PARTITION_UPPER_BOUND
- });
-
- if (config.getOptional(SCAN_PARTITION_LOWER_BOUND).isPresent()
- && config.getOptional(SCAN_PARTITION_UPPER_BOUND).isPresent()) {
- long lowerBound = config.get(SCAN_PARTITION_LOWER_BOUND);
- long upperBound = config.get(SCAN_PARTITION_UPPER_BOUND);
- if (lowerBound > upperBound) {
- throw new IllegalArgumentException(
- String.format(
- "'%s'='%s' must not be larger than '%s'='%s'.",
- SCAN_PARTITION_LOWER_BOUND.key(),
- lowerBound,
- SCAN_PARTITION_UPPER_BOUND.key(),
- upperBound));
- }
- }
-
- checkAllOrNone(config, new ConfigOption[] {LOOKUP_CACHE_MAX_ROWS, LOOKUP_CACHE_TTL});
-
- if (config.get(LOOKUP_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- LOOKUP_MAX_RETRIES.key(), config.get(LOOKUP_MAX_RETRIES)));
- }
-
- if (config.get(SINK_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- SINK_MAX_RETRIES.key(), config.get(SINK_MAX_RETRIES)));
- }
-
- if (config.get(MAX_RETRY_TIMEOUT).getSeconds() <= 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option must be in second granularity and shouldn't be smaller than 1 second, but is %s.",
- MAX_RETRY_TIMEOUT.key(),
- config.get(
- ConfigOptions.key(MAX_RETRY_TIMEOUT.key()).stringType().noDefaultValue())));
- }
- }
-
- private void checkAllOrNone(ReadableConfig config, ConfigOption<?>[] configOptions) {
- int presentCount = 0;
- for (ConfigOption configOption : configOptions) {
- if (config.getOptional(configOption).isPresent()) {
- presentCount++;
- }
- }
- String[] propertyNames =
- Arrays.stream(configOptions).map(ConfigOption::key).toArray(String[]::new);
- Preconditions.checkArgument(
- configOptions.length == presentCount || presentCount == 0,
- "Either all or none of the following options should be provided:\n"
- + String.join("\n", propertyNames));
- }
-}
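
At the SQL level, this factory was selected via its `jdbc-sqrl` identifier in a table's WITH clause; a hedged sketch of a sink definition that would route through it (schema, URL, and table name are hypothetical):

```java
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class SinkSketch {
  public static void main(String[] args) {
    TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    env.executeSql(
        "CREATE TABLE my_sink ("
            + "  id BIGINT,"
            + "  payload STRING,"
            + "  PRIMARY KEY (id) NOT ENFORCED"
            + ") WITH ("
            + "  'connector' = 'jdbc-sqrl',"
            + "  'url' = 'jdbc:postgresql://localhost:5432/mydb',"
            + "  'table-name' = 'my_sink'"
            + ")");
  }
}
```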
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
deleted file mode 100644
index 4dae46a..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.flink.connector.jdbc.converter.JdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.RowType.RowField;
-
-/**
- * JDBC dialect for PostgreSQL.
- *
- * <p>SQRL: Add quoting to identifiers
- */
-public class SqrlPostgresDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-datetime.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL
- private static final int MAX_DECIMAL_PRECISION = 1000;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public JdbcRowConverter getRowConverter(RowType rowType) {
- return new SqrlPostgresRowConverter(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional<String> defaultDriverName() {
- return Optional.of("org.postgresql.Driver");
- }
-
- /** Postgres upsert query. It uses ON CONFLICT ... DO UPDATE SET ... to upsert into Postgres. */
- @Override
- public Optional<String> getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- String uniqueColumns =
- Arrays.stream(uniqueKeyFields).map(this::quoteIdentifier).collect(Collectors.joining(", "));
- String updateClause =
- Arrays.stream(fieldNames)
- .map(f -> quoteIdentifier(f) + "=EXCLUDED." + quoteIdentifier(f))
- .collect(Collectors.joining(", "));
- return Optional.of(
- getInsertIntoStatement(tableName, fieldNames)
- + " ON CONFLICT ("
- + uniqueColumns
- + ")"
- + " DO UPDATE SET "
- + updateClause);
- }
-
- @Override
- public void validate(RowType rowType) throws ValidationException {
- List<LogicalType> unsupportedTypes =
- rowType.getFields().stream()
- .map(RowField::getType)
- .filter(type -> LogicalTypeRoot.RAW.equals(type.getTypeRoot()))
- .filter(type -> !isSupportedType(type))
- .collect(Collectors.toList());
-
- if (!unsupportedTypes.isEmpty()) {
- throw new ValidationException(
- String.format(
- "The %s dialect doesn't support type: %s.", this.dialectName(), unsupportedTypes));
- }
-
- super.validate(rowType);
- }
-
- private boolean isSupportedType(LogicalType type) {
- return SqrlPostgresRowConverter.sqrlSerializers.containsKey(type.getDefaultConversion());
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "\"" + identifier + "\"";
- }
-
- @Override
- public String dialectName() {
- return "PostgreSQL";
- }
-
- @Override
- public Optional<Range> decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
- public Optional<Range> timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Set<LogicalTypeRoot> supportedTypes() {
- // The data types used in PostgreSQL are list at:
- // https://www.postgresql.org/docs/12/datatype.html
-
- // TODO: We can't convert BINARY data type to
- // PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO in
- // LegacyTypeInfoDataTypeConverter.
-
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
- LogicalTypeRoot.ARRAY,
- LogicalTypeRoot.MAP,
- LogicalTypeRoot.RAW // see validate() for supported structured types
- );
- }
-}
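
For comparison with the H2 dialect above, the ON CONFLICT upsert this dialect generates for a hypothetical table with columns `id`, `val` and key `id`:

```java
public class PostgresUpsertSketch {
  public static void main(String[] args) {
    SqrlPostgresDialect dialect = new SqrlPostgresDialect();
    String upsert = dialect
        .getUpsertStatement("t", new String[] {"id", "val"}, new String[] {"id"})
        .get();
    System.out.println(upsert);
    // INSERT INTO "t"("id", "val") VALUES (:id, :val)
    //   ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "val"=EXCLUDED."val"
  }
}
```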
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
deleted file mode 100644
index 601feff..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import com.datasqrl.type.JdbcTypeSerializer;
-import java.lang.reflect.Type;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.stream.Collectors;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
-import org.postgresql.jdbc.PgArray;
-
-/**
- * Runtime converter that is responsible for converting between JDBC objects and Flink internal
- * objects for PostgreSQL.
- *
- * <p>SQRL: Add array support
- */
-public class SqrlPostgresRowConverter extends SqrlBaseJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- public static final Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- sqrlSerializers = discoverSerializers();
-
- private static Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- discoverSerializers() {
- return ServiceLoader.load(JdbcTypeSerializer.class).stream()
- .map(f -> f.get())
- .filter(f -> f.getDialectId().equalsIgnoreCase("postgres"))
- .collect(Collectors.toMap(JdbcTypeSerializer::getConversionClass, t -> t));
- }
-
- @Override
- public String converterName() {
- return "PostgreSQL";
- }
-
- public SqrlPostgresRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @SneakyThrows
- public void setRow(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement) {
- SqrlFieldNamedPreparedStatementImpl flinkPreparedStatement =
- (SqrlFieldNamedPreparedStatementImpl) statement;
- for (int idx : flinkPreparedStatement.getIndexMapping()[index]) {
- // RowData row = val.getRow(index, ((RowType) type).getFieldCount());
- // java.sql.Array sqlArray = flinkPreparedStatement.getStatement()
- // .getConnection().createArrayOf("bytea", );
- flinkPreparedStatement.getStatement().setBytes(idx, new byte[0]);
- }
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getDeserializerConverter().create();
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return jdbcSerializationConverter::serialize;
- } else {
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getSerializerConverter(type).create();
- } else {
- return super.createExternalConverter(type);
- }
- }
-
- @Override
- protected String getArrayType() {
- return "bytea";
- }
-
- @Override
- public JdbcDeserializationConverter createArrayConverter(ArrayType arrayType) {
- // Since PGJDBC 42.2.15 (https://github.com/pgjdbc/pgjdbc/pull/1194) bytea[] is wrapped in
- // primitive byte arrays
- final Class<?> elementClass =
- LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
- final JdbcDeserializationConverter elementConverter =
- createNullableInternalConverter(arrayType.getElementType());
- return val -> {
- // sqrl: check if scalar array
-
- Object[] in;
- if (val instanceof PgArray) {
- PgArray pgArray = (PgArray) val;
- in = (Object[]) pgArray.getArray();
- } else {
- in = (Object[]) val;
- }
- final Object[] array =
- (Object[]) java.lang.reflect.Array.newInstance(elementClass, in.length);
- for (int i = 0; i < in.length; i++) {
- array[i] = elementConverter.deserialize(in[i]);
- }
- return new GenericArrayData(array);
- };
- }
-}
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
deleted file mode 100644
index 547c983..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class FlinkArrayTypeUtil {
-
- public static LogicalType getBaseFlinkArrayType(LogicalType type) {
- if (type instanceof ArrayType) {
- return getBaseFlinkArrayType(((ArrayType) type).getElementType());
- }
- return type;
- }
-
- public static boolean isScalarArray(LogicalType type) {
- if (type instanceof ArrayType) {
- LogicalType elementType = ((ArrayType) type).getElementType();
- return isScalar(elementType) || isScalarArray(elementType);
- }
- return false;
- }
-
- public static boolean isScalar(LogicalType type) {
- switch (type.getTypeRoot()) {
- case BOOLEAN:
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- case FLOAT:
- case DOUBLE:
- case CHAR:
- case VARCHAR:
- case BINARY:
- case VARBINARY:
- case DATE:
- case TIME_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- case DECIMAL:
- return true;
- default:
- return false;
- }
- }
-}
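
The recursion in `isScalarArray` means arbitrarily nested arrays of scalar types still qualify, while arrays of structured types do not and fall back to the `bytea` path in `SqrlBaseJdbcRowConverter`. A quick illustration:

```java
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;

public class ArrayCheckSketch {
  public static void main(String[] args) {
    // ARRAY<ARRAY<INT>> -> true: the leaf element type is scalar.
    System.out.println(
        FlinkArrayTypeUtil.isScalarArray(new ArrayType(new ArrayType(new IntType()))));
    // ARRAY<ROW<INT>> -> false: row elements need the bytea fallback path.
    System.out.println(
        FlinkArrayTypeUtil.isScalarArray(new ArrayType(RowType.of(new IntType()))));
  }
}
```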
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
deleted file mode 100644
index 0a726ee..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.LogicalType;
-
-public interface JdbcTypeSerializer<D, S> {
-
- String getDialectId();
-
- Class<?> getConversionClass();
-
- String dialectTypeName();
-
- GenericDeserializationConverter<D> getDeserializerConverter();
-
- GenericSerializationConverter<S> getSerializerConverter(LogicalType type);
-
- interface GenericSerializationConverter<T> {
- T create();
- }
-
- interface GenericDeserializationConverter<T> {
- T create();
- }
-}
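The interface above is a ServiceLoader SPI: implementations are listed in META-INF/services/com.datasqrl.type.JdbcTypeSerializer (also deleted below) and filtered by dialect id at runtime, which is what SqrlPostgresRowConverter.discoverSerializers() does further down. A self-contained sketch of that discovery loop; the harness class name is illustrative:

import com.datasqrl.type.JdbcTypeSerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;

public class SerializerDiscoverySketch {
  public static void main(String[] args) {
    // Load every registered serializer, keep the Postgres ones, and key them by the
    // Java conversion class they handle -- the same shape as sqrlSerializers below.
    Map<Class<?>, JdbcTypeSerializer<?, ?>> byConversionClass = new HashMap<>();
    for (JdbcTypeSerializer<?, ?> serializer : ServiceLoader.load(JdbcTypeSerializer.class)) {
      if (serializer.getDialectId().equalsIgnoreCase("postgres")) {
        byConversionClass.put(serializer.getConversionClass(), serializer);
      }
    }
    byConversionClass.forEach(
        (cls, s) -> System.out.println(cls.getName() + " -> " + s.dialectTypeName()));
  }
}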
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
deleted file mode 100644
index 96ee9f9..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class PostgresArrayTypeConverter {
-
- /** Returns the PostgreSQL type name for the scalar base type of a Flink array. */
- public static String getArrayScalarName(LogicalType type) {
- switch (type.getTypeRoot()) {
- case CHAR:
- case VARCHAR:
- return "text";
- case BOOLEAN:
- return "boolean";
- case BINARY:
- case VARBINARY:
- return "bytea";
- case DECIMAL:
- return "decimal";
- case TINYINT:
- return "smallint";
- case SMALLINT:
- return "smallint";
- case INTEGER:
- return "integer";
- case BIGINT:
- return "bigint";
- case FLOAT:
- return "real"; // PostgreSQL uses REAL for float
- case DOUBLE:
- return "double";
- case DATE:
- return "date";
- case TIME_WITHOUT_TIME_ZONE:
- return "time without time zone";
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- return "timestamp without time zone";
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- return "timestamptz";
- case INTERVAL_YEAR_MONTH:
- return "interval year to month";
- case INTERVAL_DAY_TIME:
- return "interval day to second";
- case NULL:
- return "void";
- case ARRAY:
- return getArrayScalarName(((ArrayType) type).getElementType());
- case MULTISET:
- case MAP:
- case ROW:
- case DISTINCT_TYPE:
- case STRUCTURED_TYPE:
- case RAW:
- case SYMBOL:
- case UNRESOLVED:
- default:
- throw new RuntimeException("Cannot convert type to array type");
- }
- }
-}
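These names are what Connection.createArrayOf ultimately receives (see createSqlArrayObject in SqrlBaseJdbcRowConverter below). A plain-JDBC sketch of that call outside Flink; the URL, credentials, and table are placeholders and assume a reachable Postgres with an "int_data integer[]" column:

import java.sql.Array;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class CreateArrayOfSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn =
        DriverManager.getConnection("jdbc:postgresql://localhost:5432/db", "user", "pw")) {
      // "integer" is exactly what getArrayScalarName returns for a Flink INTEGER element.
      Array array = conn.createArrayOf("integer", new Integer[] {1, 2, 3});
      try (PreparedStatement ps = conn.prepareStatement("INSERT INTO t (int_data) VALUES (?)")) {
        ps.setArray(1, array);
        ps.executeUpdate();
      }
    }
  }
}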
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
deleted file mode 100644
index 7c0eb5d..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.json.FlinkJsonType;
-import com.datasqrl.json.FlinkJsonTypeSerializer;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.table.data.RawValueData;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.postgresql.util.PGobject;
-
-public class PostgresJsonTypeSerializer
- implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class<?> getConversionClass() {
- return FlinkJsonType.class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
- public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () ->
- (val) -> {
- FlinkJsonType t = (FlinkJsonType) val;
- return t.getJson();
- };
- }
-
- @Override
- public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- FlinkJsonTypeSerializer typeSerializer = new FlinkJsonTypeSerializer();
-
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- RawValueData<FlinkJsonType> object = val.getRawValue(index);
- FlinkJsonType vec = object.toObject(typeSerializer);
- if (vec == null) {
- statement.setObject(index, null);
- } else {
- pgObject.setValue(vec.getJson().toString());
- statement.setObject(index, pgObject);
- }
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
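Worth noting: the serializer above reports "jsonb" as its dialect type but sets the PGobject's type to "json"; the deleted FlinkJdbcTest below inserts through this path into a JSONB column successfully, so the coercion happens server-side. Stripped of the Flink plumbing, the write boils down to the following sketch; connection details are placeholders, the table and column come from that test, and the JSON payload is arbitrary:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import org.postgresql.util.PGobject;

public class JsonWriteSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn =
        DriverManager.getConnection("jdbc:postgresql://localhost:5432/db", "user", "pw")) {
      // Same calls PostgresJsonTypeSerializer makes once it has the JSON text in hand.
      PGobject pgObject = new PGobject();
      pgObject.setType("json");
      pgObject.setValue("{\"a\": 1}");
      try (PreparedStatement ps =
          conn.prepareStatement("INSERT INTO test_table (\"arrayOfRows\") VALUES (?)")) {
        ps.setObject(1, pgObject);
        ps.executeUpdate();
      }
    }
  }
}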
diff --git a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java b/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
deleted file mode 100644
index c86f5d0..0000000
--- a/sqrl-jdbc-1.16/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.format.SqrlRowDataToJsonConverters;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.formats.common.TimestampFormat;
-import org.apache.flink.formats.json.JsonFormatOptions.MapNullKeyMode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.types.Row;
-import org.postgresql.util.PGobject;
-
-public class PostgresRowTypeSerializer
- implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class<?> getConversionClass() {
- return Row[].class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
- public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () -> {
- return (val) -> null;
- };
- }
-
- @Override
- public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- ObjectMapper mapper = new ObjectMapper();
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- SqrlRowDataToJsonConverters rowDataToJsonConverter =
- new SqrlRowDataToJsonConverters(TimestampFormat.SQL, MapNullKeyMode.DROP, "null");
-
- ArrayType arrayType = (ArrayType) type;
- ObjectNode objectNode = mapper.createObjectNode();
- JsonNode convert =
- rowDataToJsonConverter
- .createConverter(arrayType.getElementType())
- .convert(mapper, objectNode, val);
-
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- pgObject.setValue(convert.toString());
- statement.setObject(index, pgObject);
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
diff --git a/sqrl-jdbc-1.16/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer b/sqrl-jdbc-1.16/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
deleted file mode 100644
index 0673d25..0000000
--- a/sqrl-jdbc-1.16/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
+++ /dev/null
@@ -1,2 +0,0 @@
-com.datasqrl.type.PostgresRowTypeSerializer
-com.datasqrl.type.PostgresJsonTypeSerializer
\ No newline at end of file
diff --git a/sqrl-jdbc-1.16/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/sqrl-jdbc-1.16/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
deleted file mode 100644
index 20a59c9..0000000
--- a/sqrl-jdbc-1.16/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.jdbc.SqrlJdbcDynamicTableFactory
\ No newline at end of file
diff --git a/sqrl-jdbc-1.16/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java b/sqrl-jdbc-1.16/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
deleted file mode 100644
index 0e4c0b8..0000000
--- a/sqrl-jdbc-1.16/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.Statement;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.ResultKind;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.junit5.MiniClusterExtension;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-@ExtendWith(MiniClusterExtension.class)
-public class FlinkJdbcTest {
-
- @Test
- public void testFlinkWithPostgres() throws Exception {
- // Start PostgreSQL container
- try (PostgreSQLContainer> postgres = new PostgreSQLContainer<>("postgres:14")) {
- postgres.start();
- // Establish a connection and create the PostgreSQL table
- try (Connection conn =
- DriverManager.getConnection(
- postgres.getJdbcUrl(), postgres.getUsername(), postgres.getPassword());
- Statement stmt = conn.createStatement()) {
- String createTableSQL = "CREATE TABLE test_table (" + " \"arrayOfRows\" JSONB " + ")";
- stmt.executeUpdate(createTableSQL);
- }
-
- // Set up Flink environment
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
-
- // Define the schema
- String createSourceTable =
- "CREATE TABLE datagen_source ("
- + " arrayOfRows ARRAY> "
- + ") WITH ("
- + " 'connector' = 'datagen',"
- + " 'number-of-rows' = '10'"
- + ")";
-
- String createSinkTable =
- "CREATE TABLE jdbc_sink ("
- + " arrayOfRows RAW('com.datasqrl.json.FlinkJsonType', 'ADFjb20uZGF0YXNxcmwuanNvbi5GbGlua0pzb25UeXBlU2VyaWFsaXplclNuYXBzaG90AAAAAQApY29tLmRhdGFzcXJsLmpzb24uRmxpbmtKc29uVHlwZVNlcmlhbGl6ZXI=') "
- + ") WITH ("
- + " 'connector' = 'jdbc-sqrl', "
- + " 'url' = '"
- + postgres.getJdbcUrl()
- + "', "
- + " 'table-name' = 'test_table', "
- + " 'username' = '"
- + postgres.getUsername()
- + "', "
- + " 'password' = '"
- + postgres.getPassword()
- + "'"
- + ")";
-
- // Register tables in the environment
- tableEnv.executeSql(
- "CREATE TEMPORARY FUNCTION IF NOT EXISTS `tojson` AS 'com.datasqrl.json.ToJson' LANGUAGE JAVA");
- tableEnv.executeSql(createSourceTable);
- tableEnv.executeSql(createSinkTable);
-
- // Set up a simple Flink job
- TableResult tableResult =
- tableEnv.executeSql(
- "INSERT INTO jdbc_sink SELECT tojson(arrayOfRows) AS arrayOfRows FROM datagen_source");
- tableResult.print();
-
- assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind());
- }
- }
-}
diff --git a/sqrl-jdbc-1.16/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java b/sqrl-jdbc-1.16/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java
deleted file mode 100644
index b6d4448..0000000
--- a/sqrl-jdbc-1.16/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import java.sql.Array;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.GenericRowData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.data.writer.BinaryArrayWriter;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.DoubleType;
-import org.apache.flink.table.types.logical.IntType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.TimestampType;
-import org.apache.flink.table.types.logical.VarCharType;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-class PostgresRowConverterTest {
- private static final PostgreSQLContainer postgres = new PostgreSQLContainer("postgres:15.4");
-
- @BeforeAll
- public static void setUp() {
- postgres.start();
- }
-
- @AfterAll
- public static void tearDown() {
- postgres.stop();
- }
-
- private ArrayType doubleArrayType = new ArrayType(new DoubleType());
- private ArrayType timestampArrayType = new ArrayType(new LocalZonedTimestampType());
- private ArrayType doubleArray2DType = new ArrayType(doubleArrayType);
- private RowType sampleRowType = RowType.of(new IntType(), new VarCharType());
-
- private void executeUpdate(Connection connection, String query) throws Exception {
- try (Statement stmt = connection.createStatement()) {
- stmt.executeUpdate(query);
- }
- }
-
- @Test
- public void testArraySerializationAndDeserialization() throws Exception {
- try (Connection connection = postgres.createConnection("")) {
- executeUpdate(
- connection,
- "CREATE TABLE test (id int, int_data int[], double_data double precision[], ts_data timestamptz[], double_data_2d double precision[][], row_data bytea)");
-
- // Set up the converter
- RowType rowType =
- RowType.of(
- new IntType(),
- new ArrayType(new IntType()),
- doubleArrayType,
- timestampArrayType,
- doubleArray2DType,
- sampleRowType);
- SqrlPostgresRowConverter converter = new SqrlPostgresRowConverter(rowType);
-
- // Sample data
- GenericRowData rowData = new GenericRowData(6);
- rowData.setField(0, 1);
-
- // Integer Array - GenericArrayData
- GenericArrayData intArray = new GenericArrayData(new int[] {1, 2, 3});
- rowData.setField(1, intArray);
-
- // Double Array - GenericArrayData
- GenericArrayData doubleArray = new GenericArrayData(new double[] {1.1, 2.2, 3.3});
- rowData.setField(2, doubleArray);
-
- // Timestamp Array - GenericArrayData
- BinaryArrayData array = new BinaryArrayData();
- BinaryArrayWriter writer = new BinaryArrayWriter(array, 2, 8);
- final int precision = 3;
- writer.reset();
- writer.writeTimestamp(0, TimestampData.fromEpochMillis(123000L), precision);
- writer.writeTimestamp(1, TimestampData.fromEpochMillis(123000L), precision);
- writer.complete();
- rowData.setField(3, array);
-
- // 2D Double Array - GenericArrayData
- GenericArrayData doubleArray2d =
- new GenericArrayData(new double[][] {{1.1, 2.2}, {3.3, 4.4}});
- rowData.setField(4, doubleArray2d);
-
- // RowType not being an array
- GenericRowData sampleRow = new GenericRowData(2);
- sampleRow.setField(0, 10);
- sampleRow.setField(1, "test");
- rowData.setField(5, sampleRow);
-
- FieldNamedPreparedStatement statement =
- SqrlFieldNamedPreparedStatementImpl.prepareStatement(
- connection,
- "INSERT INTO test (id, int_data, double_data, ts_data, double_data_2d, row_data) VALUES (:id, :int_data, :double_data, :ts_data, :double_data_2d, :row_data)",
- List.of("id", "int_data", "double_data", "ts_data", "double_data_2d", "row_data")
- .toArray(String[]::new));
-
- for (int i = 0; i < rowType.getFieldCount(); i++) {
- JdbcSerializationConverter externalConverter =
- converter.createExternalConverter(rowType.getTypeAt(i));
- externalConverter.serialize(rowData, i, statement);
- }
- statement.addBatch();
-
- int[] result = statement.executeBatch();
- assertEquals(1, result.length);
-
- Statement stmt = connection.createStatement();
- // Deserialize
- ResultSet rs =
- stmt.executeQuery(
- "SELECT int_data, double_data, ts_data, double_data_2d, row_data FROM test WHERE id=1");
- assertTrue(rs.next());
-
- // Assert Integer Array
- Array intArrayRetrieved = rs.getArray("int_data");
- Object intDataDeserialized =
- converter
- .createArrayConverter(new ArrayType(new IntType()))
- .deserialize(intArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) rowData.getField(1)).toIntArray(),
- ((GenericArrayData) intDataDeserialized).toIntArray());
-
- // Assert Double Array
- Array doubleArrayRetrieved = rs.getArray("double_data");
- Object doubleDataDeserialized =
- converter.createArrayConverter(doubleArrayType).deserialize(doubleArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) rowData.getField(2)).toDoubleArray(),
- ((GenericArrayData) doubleDataDeserialized).toDoubleArray());
-
- // Assert Timestamp Array
- Array timestampArrayRetrieved = rs.getArray("ts_data");
- Object timestampDataDeserialized =
- converter.createArrayConverter(timestampArrayType).deserialize(timestampArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) timestampDataDeserialized).toObjectArray(),
- Arrays.stream(((BinaryArrayData) rowData.getField(3)).toObjectArray(new TimestampType()))
- .toArray());
- // Assert 2D Double Array (it's a bit tricky given the 2D nature)
- Array double2DArrayRetrieved = rs.getArray("double_data_2d");
- Object double2DDataDeserialized =
- converter.createArrayConverter(doubleArray2DType).deserialize(double2DArrayRetrieved);
- // todo: 2d arrays are not well supported
- GenericArrayData field = (GenericArrayData) rowData.getField(4);
- assertNotNull(field);
-
- // todo: Row type not well supported
- Object rowRetrieved = rs.getObject("row_data");
- assertNotNull(rowRetrieved);
- }
- }
-}
diff --git a/sqrl-jdbc-1.17/pom.xml b/sqrl-jdbc-1.17/pom.xml
deleted file mode 100644
index abce1b2..0000000
--- a/sqrl-jdbc-1.17/pom.xml
+++ /dev/null
@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>com.datasqrl.flink</groupId>
- <artifactId>sqrl-flink-parent</artifactId>
- <version>0.1-SNAPSHOT</version>
- </parent>
-
- <artifactId>sqrl-jdbc-1.17</artifactId>
- <name>Jdbc sink for flink 1.17</name>
-
- <url>https://www.datasqrl.com/</url>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <developers>
- <developer>
- <name>Daniel Henneberger</name>
- <email>daniel@datasqrl.com</email>
- <url>https://github.com/henneberger</url>
- </developer>
- </developers>
-
- <scm>
- <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
- <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
- <tag>HEAD</tag>
- <url>https://github.com/DataSQRL/sqrl</url>
- </scm>
-
- <properties>
- <maven.compiler.source>11</maven.compiler.source>
- <maven.compiler.target>11</maven.compiler.target>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <flink.version>1.17.2</flink.version>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-connector-jdbc</artifactId>
- <version>3.1.2-1.17</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.postgresql</groupId>
- <artifactId>postgresql</artifactId>
- <version>${postgres.version}</version>
- </dependency>
- <dependency>
- <groupId>org.testcontainers</groupId>
- <artifactId>postgresql</artifactId>
- <version>${testcontainers.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-runtime</artifactId>
- <version>${flink.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-common</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-lib-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-flexible-json</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-csv</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-json</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-planner_2.12</artifactId>
- <version>${flink.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils</artifactId>
- <version>${flink.version}</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
deleted file mode 100644
index 6c76f5e..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static com.datasqrl.type.FlinkArrayTypeUtil.getBaseFlinkArrayType;
-import static com.datasqrl.type.FlinkArrayTypeUtil.isScalarArray;
-import static com.datasqrl.type.PostgresArrayTypeConverter.getArrayScalarName;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.MAP;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.ROW;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
-
-import java.sql.Array;
-import java.sql.PreparedStatement;
-import java.sql.Timestamp;
-import java.sql.Types;
-import java.time.LocalDateTime;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.ArrayData;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-/** A sqrl class to handle arrays and extra data types */
-public abstract class SqrlBaseJdbcRowConverter extends AbstractJdbcRowConverter {
-
- public SqrlBaseJdbcRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (type.getTypeRoot() == TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- int timestampWithTimezone = Types.TIMESTAMP_WITH_TIMEZONE;
- return (val, index, statement) -> {
- if (val == null || val.isNullAt(index) || LogicalTypeRoot.NULL.equals(type.getTypeRoot())) {
- statement.setNull(index, timestampWithTimezone);
- } else {
- jdbcSerializationConverter.serialize(val, index, statement);
- }
- };
- } else if (type.getTypeRoot() == ROW) {
- return (val, index, statement) -> setRow(type, val, index, statement);
- } else if (type.getTypeRoot() == MAP) {
- return (val, index, statement) -> setRow(type, val, index, statement);
- }
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- LogicalTypeRoot root = type.getTypeRoot();
-
- if (root == LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- return val ->
- val instanceof LocalDateTime
- ? TimestampData.fromLocalDateTime((LocalDateTime) val)
- : TimestampData.fromTimestamp((Timestamp) val);
- } else if (root == LogicalTypeRoot.ARRAY) {
- ArrayType arrayType = (ArrayType) type;
- return createArrayConverter(arrayType);
- } else if (root == LogicalTypeRoot.ROW) {
- return val -> val;
- } else if (root == LogicalTypeRoot.MAP) {
- return val -> val;
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- switch (type.getTypeRoot()) {
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- final int tsPrecision = ((LocalZonedTimestampType) type).getPrecision();
- return (val, index, statement) ->
- statement.setTimestamp(index, val.getTimestamp(index, tsPrecision).toTimestamp());
- case ARRAY:
- return (val, index, statement) -> setArray(type, val, index, statement);
- case ROW:
- return (val, index, statement) -> setRow(type, val, index, statement);
- case MAP:
- return (val, index, statement) -> setRow(type, val, index, statement);
- case MULTISET:
- case RAW:
- default:
- return super.createExternalConverter(type);
- }
- }
-
- public abstract void setRow(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement);
-
- @SneakyThrows
- public void setArray(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement) {
- SqrlFieldNamedPreparedStatementImpl flinkPreparedStatement =
- (SqrlFieldNamedPreparedStatementImpl) statement;
- for (int idx : flinkPreparedStatement.getIndexMapping()[index]) {
- ArrayData arrayData = val.getArray(index);
- createSqlArrayObject(type, arrayData, idx, flinkPreparedStatement.getStatement());
- }
- }
-
- @SneakyThrows
- private void createSqlArrayObject(
- LogicalType type, ArrayData data, int idx, PreparedStatement statement) {
- // Scalar arrays of any dimension are written with a single createArrayOf call
- if (isScalarArray(type)) {
- Object[] boxed;
- if (data instanceof GenericArrayData) {
- boxed = ((GenericArrayData) data).toObjectArray();
- } else if (data instanceof BinaryArrayData) {
- boxed = ((BinaryArrayData) data).toObjectArray(getBaseFlinkArrayType(type));
- } else {
- throw new RuntimeException("Unsupported ArrayData type: " + data.getClass());
- }
- Array array = statement.getConnection().createArrayOf(getArrayScalarName(type), boxed);
- statement.setArray(idx, array);
- } else {
- // If it is not a scalar array (e.g. row type), use an empty byte array.
- Array array = statement.getConnection().createArrayOf(getArrayType(), new Byte[0]);
- statement.setArray(idx, array);
- }
- }
-
- protected abstract String getArrayType();
-
- public abstract JdbcDeserializationConverter createArrayConverter(ArrayType arrayType);
-}
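One detail the converter above guards against: depending on the PGJDBC version and fetch path, a timestamptz column may surface as java.sql.Timestamp or java.time.LocalDateTime. A tiny sketch of that same branch in isolation; the harness name and sample value are arbitrary:

import java.time.LocalDateTime;
import org.apache.flink.table.data.TimestampData;

public class TimestampBranchSketch {
  public static void main(String[] args) {
    // Mirrors the TIMESTAMP_WITH_LOCAL_TIME_ZONE case of createInternalConverter above.
    Object fromJdbc = LocalDateTime.of(2024, 1, 1, 12, 0); // or a java.sql.Timestamp
    TimestampData internal =
        fromJdbc instanceof LocalDateTime
            ? TimestampData.fromLocalDateTime((LocalDateTime) fromJdbc)
            : TimestampData.fromTimestamp((java.sql.Timestamp) fromJdbc);
    System.out.println(internal); // 2024-01-01T12:00
  }
}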
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java
deleted file mode 100644
index ea13e8e..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.util.Preconditions.checkArgument;
-import static org.apache.flink.util.Preconditions.checkNotNull;
-
-import java.math.BigDecimal;
-import java.sql.Connection;
-import java.sql.Date;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-
-/** SQRL: added getStatement */
-
-/** Simple implementation of {@link FieldNamedPreparedStatement}. */
-public class SqrlFieldNamedPreparedStatementImpl implements FieldNamedPreparedStatement {
-
- private final PreparedStatement statement;
- private final int[][] indexMapping;
-
- private SqrlFieldNamedPreparedStatementImpl(PreparedStatement statement, int[][] indexMapping) {
- this.statement = statement;
- this.indexMapping = indexMapping;
- }
-
- public PreparedStatement getStatement() {
- return statement;
- }
-
- public int[][] getIndexMapping() {
- return indexMapping;
- }
-
- @Override
- public void clearParameters() throws SQLException {
- statement.clearParameters();
- }
-
- @Override
- public ResultSet executeQuery() throws SQLException {
- return statement.executeQuery();
- }
-
- @Override
- public void addBatch() throws SQLException {
- statement.addBatch();
- }
-
- @Override
- public int[] executeBatch() throws SQLException {
- return statement.executeBatch();
- }
-
- @Override
- public void setNull(int fieldIndex, int sqlType) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setNull(index, sqlType);
- }
- }
-
- @Override
- public void setBoolean(int fieldIndex, boolean x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBoolean(index, x);
- }
- }
-
- @Override
- public void setByte(int fieldIndex, byte x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setByte(index, x);
- }
- }
-
- @Override
- public void setShort(int fieldIndex, short x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setShort(index, x);
- }
- }
-
- @Override
- public void setInt(int fieldIndex, int x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setInt(index, x);
- }
- }
-
- @Override
- public void setLong(int fieldIndex, long x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setLong(index, x);
- }
- }
-
- @Override
- public void setFloat(int fieldIndex, float x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setFloat(index, x);
- }
- }
-
- @Override
- public void setDouble(int fieldIndex, double x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setDouble(index, x);
- }
- }
-
- @Override
- public void setBigDecimal(int fieldIndex, BigDecimal x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBigDecimal(index, x);
- }
- }
-
- @Override
- public void setString(int fieldIndex, String x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setString(index, x);
- }
- }
-
- @Override
- public void setBytes(int fieldIndex, byte[] x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBytes(index, x);
- }
- }
-
- @Override
- public void setDate(int fieldIndex, Date x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setDate(index, x);
- }
- }
-
- @Override
- public void setTime(int fieldIndex, Time x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setTime(index, x);
- }
- }
-
- @Override
- public void setTimestamp(int fieldIndex, Timestamp x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setTimestamp(index, x);
- }
- }
-
- @Override
- public void setObject(int fieldIndex, Object x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setObject(index, x);
- }
- }
-
- @Override
- public void close() throws SQLException {
- statement.close();
- }
-
- // ----------------------------------------------------------------------------------------
-
- public static FieldNamedPreparedStatement prepareStatement(
- Connection connection, String sql, String[] fieldNames) throws SQLException {
- checkNotNull(connection, "connection must not be null.");
- checkNotNull(sql, "sql must not be null.");
- checkNotNull(fieldNames, "fieldNames must not be null.");
-
- if (sql.contains("?")) {
- throw new IllegalArgumentException("SQL statement must not contain ? character.");
- }
-
- HashMap<String, List<Integer>> parameterMap = new HashMap<>();
- String parsedSQL = parseNamedStatement(sql, parameterMap);
- // currently, the statements must contain all the field parameters
- checkArgument(parameterMap.size() == fieldNames.length);
- int[][] indexMapping = new int[fieldNames.length][];
- for (int i = 0; i < fieldNames.length; i++) {
- String fieldName = fieldNames[i];
- checkArgument(
- parameterMap.containsKey(fieldName),
- fieldName + " doesn't exist in the parameters of SQL statement: " + sql);
- indexMapping[i] = parameterMap.get(fieldName).stream().mapToInt(v -> v).toArray();
- }
-
- return new SqrlFieldNamedPreparedStatementImpl(
- connection.prepareStatement(parsedSQL), indexMapping);
- }
-
- /**
- * Parses a sql with named parameters. The parameter-index mappings are put into the map, and the
- * parsed sql is returned.
- *
- * @param sql sql to parse
- * @param paramMap map to hold parameter-index mappings
- * @return the parsed sql
- */
- public static String parseNamedStatement(String sql, Map<String, List<Integer>> paramMap) {
- StringBuilder parsedSql = new StringBuilder();
- int fieldIndex = 1; // SQL statement parameter index starts from 1
- int length = sql.length();
- for (int i = 0; i < length; i++) {
- char c = sql.charAt(i);
- if (':' == c) {
- int j = i + 1;
- while (j < length && Character.isJavaIdentifierPart(sql.charAt(j))) {
- j++;
- }
- String parameterName = sql.substring(i + 1, j);
- checkArgument(
- !parameterName.isEmpty(), "Named parameters in SQL statement must not be empty.");
- paramMap.computeIfAbsent(parameterName, n -> new ArrayList<>()).add(fieldIndex);
- fieldIndex++;
- i = j - 1;
- parsedSql.append('?');
- } else {
- parsedSql.append(c);
- }
- }
- return parsedSql.toString();
- }
-}
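parseNamedStatement is the piece that lets the dialect emit :field placeholders (see getUpsertStatement below) while PGJDBC still sees standard ? parameters. A short sketch of its behavior; the printed map order may vary:

import com.datasqrl.jdbc.SqrlFieldNamedPreparedStatementImpl;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NamedStatementParseSketch {
  public static void main(String[] args) {
    Map<String, List<Integer>> params = new HashMap<>();
    String parsed =
        SqrlFieldNamedPreparedStatementImpl.parseNamedStatement(
            "INSERT INTO test (id, int_data) VALUES (:id, :int_data)", params);
    System.out.println(parsed); // INSERT INTO test (id, int_data) VALUES (?, ?)
    System.out.println(params); // {id=[1], int_data=[2]}
  }
}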
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
deleted file mode 100644
index 05a8c6f..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.DRIVER;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MISSING_KEY;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_TTL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.MAX_RETRY_TIMEOUT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.PASSWORD;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_AUTO_COMMIT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_FETCH_SIZE;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_COLUMN;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_LOWER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_NUM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_UPPER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_PARALLELISM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.TABLE_NAME;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.URL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.USERNAME;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.configuration.ConfigOption;
-import org.apache.flink.configuration.ConfigOptions;
-import org.apache.flink.configuration.ReadableConfig;
-import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectLoader;
-import org.apache.flink.connector.jdbc.internal.options.InternalJdbcConnectionOptions;
-import org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSink;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSource;
-import org.apache.flink.table.connector.sink.DynamicTableSink;
-import org.apache.flink.table.connector.source.lookup.LookupOptions;
-import org.apache.flink.table.factories.DynamicTableSinkFactory;
-import org.apache.flink.table.factories.FactoryUtil;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.util.Preconditions;
-
-/**
- * Factory for creating configured instances of {@link JdbcDynamicTableSource} and {@link
- * JdbcDynamicTableSink}.
- */
-@Internal
-public class SqrlJdbcDynamicTableFactory implements DynamicTableSinkFactory {
-
- public static final String IDENTIFIER = "jdbc-sqrl";
-
- @Override
- public DynamicTableSink createDynamicTableSink(Context context) {
- final FactoryUtil.TableFactoryHelper helper =
- FactoryUtil.createTableFactoryHelper(this, context);
- final ReadableConfig config = helper.getOptions();
-
- helper.validate();
- validateConfigOptions(config, context.getClassLoader());
- validateDataTypeWithJdbcDialect(
- context.getPhysicalRowDataType(), config.get(URL), context.getClassLoader());
- InternalJdbcConnectionOptions jdbcOptions = getJdbcOptions(config, context.getClassLoader());
-
- return new JdbcDynamicTableSink(
- jdbcOptions,
- getJdbcExecutionOptions(config),
- getJdbcDmlOptions(
- jdbcOptions, context.getPhysicalRowDataType(), context.getPrimaryKeyIndexes()),
- context.getPhysicalRowDataType());
- }
-
- private static void validateDataTypeWithJdbcDialect(
- DataType dataType, String url, ClassLoader classLoader) {
- JdbcDialect dialect = loadDialect(url, classLoader);
-
- dialect.validate((RowType) dataType.getLogicalType());
- }
-
- private InternalJdbcConnectionOptions getJdbcOptions(
- ReadableConfig readableConfig, ClassLoader classLoader) {
- final String url = readableConfig.get(URL);
- final InternalJdbcConnectionOptions.Builder builder =
- InternalJdbcConnectionOptions.builder()
- .setClassLoader(classLoader)
- .setDBUrl(url)
- .setTableName(readableConfig.get(TABLE_NAME))
- .setDialect(loadDialect(url, classLoader))
- .setParallelism(readableConfig.getOptional(SINK_PARALLELISM).orElse(null))
- .setConnectionCheckTimeoutSeconds(
- (int) readableConfig.get(MAX_RETRY_TIMEOUT).getSeconds());
-
- readableConfig.getOptional(DRIVER).ifPresent(builder::setDriverName);
- readableConfig.getOptional(USERNAME).ifPresent(builder::setUsername);
- readableConfig.getOptional(PASSWORD).ifPresent(builder::setPassword);
- return builder.build();
- }
-
- private static JdbcDialect loadDialect(String url, ClassLoader classLoader) {
- JdbcDialect dialect = JdbcDialectLoader.load(url, classLoader);
- // sqrl: standard postgres dialect with extended dialect
- if (dialect.dialectName().equalsIgnoreCase("PostgreSQL")) {
- return new SqrlPostgresDialect();
- }
- return dialect;
- }
-
- private JdbcExecutionOptions getJdbcExecutionOptions(ReadableConfig config) {
- final JdbcExecutionOptions.Builder builder = new JdbcExecutionOptions.Builder();
- builder.withBatchSize(config.get(SINK_BUFFER_FLUSH_MAX_ROWS));
- builder.withBatchIntervalMs(config.get(SINK_BUFFER_FLUSH_INTERVAL).toMillis());
- builder.withMaxRetries(config.get(SINK_MAX_RETRIES));
- return builder.build();
- }
-
- private JdbcDmlOptions getJdbcDmlOptions(
- InternalJdbcConnectionOptions jdbcOptions, DataType dataType, int[] primaryKeyIndexes) {
-
- String[] keyFields =
- Arrays.stream(primaryKeyIndexes)
- .mapToObj(i -> DataType.getFieldNames(dataType).get(i))
- .toArray(String[]::new);
-
- return JdbcDmlOptions.builder()
- .withTableName(jdbcOptions.getTableName())
- .withDialect(jdbcOptions.getDialect())
- .withFieldNames(DataType.getFieldNames(dataType).toArray(new String[0]))
- .withKeyFields(keyFields.length > 0 ? keyFields : null)
- .build();
- }
-
- @Override
- public String factoryIdentifier() {
- return IDENTIFIER;
- }
-
- @Override
- public Set<ConfigOption<?>> requiredOptions() {
- Set<ConfigOption<?>> requiredOptions = new HashSet<>();
- requiredOptions.add(URL);
- requiredOptions.add(TABLE_NAME);
- return requiredOptions;
- }
-
- @Override
- public Set<ConfigOption<?>> optionalOptions() {
- Set<ConfigOption<?>> optionalOptions = new HashSet<>();
- optionalOptions.add(DRIVER);
- optionalOptions.add(USERNAME);
- optionalOptions.add(PASSWORD);
- optionalOptions.add(SCAN_PARTITION_COLUMN);
- optionalOptions.add(SCAN_PARTITION_LOWER_BOUND);
- optionalOptions.add(SCAN_PARTITION_UPPER_BOUND);
- optionalOptions.add(SCAN_PARTITION_NUM);
- optionalOptions.add(SCAN_FETCH_SIZE);
- optionalOptions.add(SCAN_AUTO_COMMIT);
- optionalOptions.add(LOOKUP_CACHE_MAX_ROWS);
- optionalOptions.add(LOOKUP_CACHE_TTL);
- optionalOptions.add(LOOKUP_MAX_RETRIES);
- optionalOptions.add(LOOKUP_CACHE_MISSING_KEY);
- optionalOptions.add(SINK_BUFFER_FLUSH_MAX_ROWS);
- optionalOptions.add(SINK_BUFFER_FLUSH_INTERVAL);
- optionalOptions.add(SINK_MAX_RETRIES);
- optionalOptions.add(SINK_PARALLELISM);
- optionalOptions.add(MAX_RETRY_TIMEOUT);
- optionalOptions.add(LookupOptions.CACHE_TYPE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_ACCESS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_WRITE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_MAX_ROWS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_CACHE_MISSING_KEY);
- optionalOptions.add(LookupOptions.MAX_RETRIES);
- return optionalOptions;
- }
-
- @Override
- public Set<ConfigOption<?>> forwardOptions() {
- return Stream.of(
- URL,
- TABLE_NAME,
- USERNAME,
- PASSWORD,
- DRIVER,
- SINK_BUFFER_FLUSH_MAX_ROWS,
- SINK_BUFFER_FLUSH_INTERVAL,
- SINK_MAX_RETRIES,
- MAX_RETRY_TIMEOUT,
- SCAN_FETCH_SIZE,
- SCAN_AUTO_COMMIT)
- .collect(Collectors.toSet());
- }
-
- private void validateConfigOptions(ReadableConfig config, ClassLoader classLoader) {
- String jdbcUrl = config.get(URL);
- // JdbcDialectLoader.load(jdbcUrl, classLoader);
-
- checkAllOrNone(config, new ConfigOption<?>[] {USERNAME, PASSWORD});
-
- checkAllOrNone(
- config,
- new ConfigOption<?>[] {
- SCAN_PARTITION_COLUMN,
- SCAN_PARTITION_NUM,
- SCAN_PARTITION_LOWER_BOUND,
- SCAN_PARTITION_UPPER_BOUND
- });
-
- if (config.getOptional(SCAN_PARTITION_LOWER_BOUND).isPresent()
- && config.getOptional(SCAN_PARTITION_UPPER_BOUND).isPresent()) {
- long lowerBound = config.get(SCAN_PARTITION_LOWER_BOUND);
- long upperBound = config.get(SCAN_PARTITION_UPPER_BOUND);
- if (lowerBound > upperBound) {
- throw new IllegalArgumentException(
- String.format(
- "'%s'='%s' must not be larger than '%s'='%s'.",
- SCAN_PARTITION_LOWER_BOUND.key(),
- lowerBound,
- SCAN_PARTITION_UPPER_BOUND.key(),
- upperBound));
- }
- }
-
- checkAllOrNone(config, new ConfigOption<?>[] {LOOKUP_CACHE_MAX_ROWS, LOOKUP_CACHE_TTL});
-
- if (config.get(LOOKUP_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- LOOKUP_MAX_RETRIES.key(), config.get(LOOKUP_MAX_RETRIES)));
- }
-
- if (config.get(SINK_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- SINK_MAX_RETRIES.key(), config.get(SINK_MAX_RETRIES)));
- }
-
- if (config.get(MAX_RETRY_TIMEOUT).getSeconds() <= 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option must be in second granularity and shouldn't be smaller than 1 second, but is %s.",
- MAX_RETRY_TIMEOUT.key(),
- config.get(
- ConfigOptions.key(MAX_RETRY_TIMEOUT.key()).stringType().noDefaultValue())));
- }
- }
-
- private void checkAllOrNone(ReadableConfig config, ConfigOption<?>[] configOptions) {
- int presentCount = 0;
- for (ConfigOption<?> configOption : configOptions) {
- if (config.getOptional(configOption).isPresent()) {
- presentCount++;
- }
- }
- String[] propertyNames =
- Arrays.stream(configOptions).map(ConfigOption::key).toArray(String[]::new);
- Preconditions.checkArgument(
- configOptions.length == presentCount || presentCount == 0,
- "Either all or none of the following options should be provided:\n"
- + String.join("\n", propertyNames));
- }
-}
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
deleted file mode 100644
index 4dae46a..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.flink.connector.jdbc.converter.JdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.RowType.RowField;
-
-/**
- * JDBC dialect for PostgreSQL.
- *
- * SQRL: Add quoting to identifiers
- */
-public class SqrlPostgresDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-datetime.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL
- private static final int MAX_DECIMAL_PRECISION = 1000;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public JdbcRowConverter getRowConverter(RowType rowType) {
- return new SqrlPostgresRowConverter(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional<String> defaultDriverName() {
- return Optional.of("org.postgresql.Driver");
- }
-
- /** Postgres upsert query. It uses ON CONFLICT ... DO UPDATE SET ... to upsert into Postgres. */
- @Override
- public Optional<String> getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- String uniqueColumns =
- Arrays.stream(uniqueKeyFields).map(this::quoteIdentifier).collect(Collectors.joining(", "));
- String updateClause =
- Arrays.stream(fieldNames)
- .map(f -> quoteIdentifier(f) + "=EXCLUDED." + quoteIdentifier(f))
- .collect(Collectors.joining(", "));
- return Optional.of(
- getInsertIntoStatement(tableName, fieldNames)
- + " ON CONFLICT ("
- + uniqueColumns
- + ")"
- + " DO UPDATE SET "
- + updateClause);
- }
-
- @Override
- public void validate(RowType rowType) throws ValidationException {
- List<LogicalType> unsupportedTypes =
- rowType.getFields().stream()
- .map(RowField::getType)
- .filter(type -> LogicalTypeRoot.RAW.equals(type.getTypeRoot()))
- .filter(type -> !isSupportedType(type))
- .collect(Collectors.toList());
-
- if (!unsupportedTypes.isEmpty()) {
- throw new ValidationException(
- String.format(
- "The %s dialect doesn't support type: %s.", this.dialectName(), unsupportedTypes));
- }
-
- super.validate(rowType);
- }
-
- private boolean isSupportedType(LogicalType type) {
- return SqrlPostgresRowConverter.sqrlSerializers.containsKey(type.getDefaultConversion());
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "\"" + identifier + "\"";
- }
-
- @Override
- public String dialectName() {
- return "PostgreSQL";
- }
-
- @Override
- public Optional<Range> decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
- public Optional<Range> timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Set<LogicalTypeRoot> supportedTypes() {
- // The data types used in PostgreSQL are listed at:
- // https://www.postgresql.org/docs/12/datatype.html
-
- // TODO: We can't convert BINARY data type to
- // PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO in
- // LegacyTypeInfoDataTypeConverter.
-
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
- LogicalTypeRoot.ARRAY,
- LogicalTypeRoot.MAP,
- LogicalTypeRoot.RAW // see validate() for supported structured types
- );
- }
-}
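For reviewers, the quoting change plus getUpsertStatement produce SQL like the following. A small sketch against the deleted class; the column names are placeholders and the expected output is reconstructed from the code above, so treat it as approximate:

import com.datasqrl.jdbc.SqrlPostgresDialect;

public class UpsertStatementSketch {
  public static void main(String[] args) {
    SqrlPostgresDialect dialect = new SqrlPostgresDialect();
    String upsert =
        dialect
            .getUpsertStatement("test_table", new String[] {"id", "payload"}, new String[] {"id"})
            .get();
    // Roughly: INSERT INTO "test_table"("id", "payload") VALUES (:id, :payload)
    //          ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "payload"=EXCLUDED."payload"
    System.out.println(upsert);
  }
}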
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
deleted file mode 100644
index 601feff..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import com.datasqrl.type.JdbcTypeSerializer;
-import java.lang.reflect.Type;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.stream.Collectors;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
-import org.postgresql.jdbc.PgArray;
-
-/**
- * Runtime converter responsible for converting between JDBC objects and Flink internal objects
- * for PostgreSQL.
- *
- * SQRL: Add array support
- */
-public class SqrlPostgresRowConverter extends SqrlBaseJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- public static final Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- sqrlSerializers = discoverSerializers();
-
- private static Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- discoverSerializers() {
- return ServiceLoader.load(JdbcTypeSerializer.class).stream()
- .map(f -> f.get())
- .filter(f -> f.getDialectId().equalsIgnoreCase("postgres"))
- .collect(Collectors.toMap(JdbcTypeSerializer::getConversionClass, t -> t));
- }
-
- @Override
- public String converterName() {
- return "PostgreSQL";
- }
-
- public SqrlPostgresRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @SneakyThrows
- public void setRow(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement) {
- SqrlFieldNamedPreparedStatementImpl flinkPreparedStatement =
- (SqrlFieldNamedPreparedStatementImpl) statement;
- for (int idx : flinkPreparedStatement.getIndexMapping()[index]) {
- // RowData row = val.getRow(index, ((RowType) type).getFieldCount());
- // java.sql.Array sqlArray = flinkPreparedStatement.getStatement()
- // .getConnection().createArrayOf("bytea", );
- flinkPreparedStatement.getStatement().setBytes(idx, new byte[0]);
- }
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getDeserializerConverter().create();
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return jdbcSerializationConverter::serialize;
- } else {
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getSerializerConverter(type).create();
- } else {
- return super.createExternalConverter(type);
- }
- }
-
- @Override
- protected String getArrayType() {
- return "bytea";
- }
-
- @Override
- public JdbcDeserializationConverter createArrayConverter(ArrayType arrayType) {
- // Since PGJDBC 42.2.15 (https://github.com/pgjdbc/pgjdbc/pull/1194) bytea[] is wrapped in
- // primitive byte arrays
-    final Class<?> elementClass =
- LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
- final JdbcDeserializationConverter elementConverter =
- createNullableInternalConverter(arrayType.getElementType());
- return val -> {
- // sqrl: check if scalar array
-
- Object[] in;
- if (val instanceof PgArray) {
- PgArray pgArray = (PgArray) val;
- in = (Object[]) pgArray.getArray();
- } else {
- in = (Object[]) val;
- }
- final Object[] array =
- (Object[]) java.lang.reflect.Array.newInstance(elementClass, in.length);
- for (int i = 0; i < in.length; i++) {
- array[i] = elementConverter.deserialize(in[i]);
- }
- return new GenericArrayData(array);
- };
- }
-}
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
deleted file mode 100644
index 547c983..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class FlinkArrayTypeUtil {
-
- public static LogicalType getBaseFlinkArrayType(LogicalType type) {
- if (type instanceof ArrayType) {
- return getBaseFlinkArrayType(((ArrayType) type).getElementType());
- }
- return type;
- }
-
- public static boolean isScalarArray(LogicalType type) {
- if (type instanceof ArrayType) {
- LogicalType elementType = ((ArrayType) type).getElementType();
- return isScalar(elementType) || isScalarArray(elementType);
- }
- return false;
- }
-
- public static boolean isScalar(LogicalType type) {
- switch (type.getTypeRoot()) {
- case BOOLEAN:
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- case FLOAT:
- case DOUBLE:
- case CHAR:
- case VARCHAR:
- case BINARY:
- case VARBINARY:
- case DATE:
- case TIME_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- case DECIMAL:
- return true;
- default:
- return false;
- }
- }
-}
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
deleted file mode 100644
index 0a726ee..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.LogicalType;
-
-public interface JdbcTypeSerializer<D, S> {
-
-  String getDialectId();
-
-  Class<?> getConversionClass();
-
-  String dialectTypeName();
-
-  GenericDeserializationConverter<D> getDeserializerConverter();
-
-  GenericSerializationConverter<S> getSerializerConverter(LogicalType type);
-
-  interface GenericSerializationConverter<T> {
-    T create();
-  }
-
-  interface GenericDeserializationConverter<T> {
-    T create();
-  }
-}
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
deleted file mode 100644
index 96ee9f9..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class PostgresArrayTypeConverter {
-
-  /** Returns the PostgreSQL scalar type name for the given Flink type, recursing through arrays. */
- public static String getArrayScalarName(LogicalType type) {
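-    // e.g. INTEGER maps to "integer"; ARRAY<ARRAY<DOUBLE>> recurses to "double".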
- switch (type.getTypeRoot()) {
- case CHAR:
- case VARCHAR:
- return "text";
- case BOOLEAN:
- return "boolean";
- case BINARY:
- case VARBINARY:
- return "bytea";
- case DECIMAL:
- return "decimal";
- case TINYINT:
- return "smallint";
- case SMALLINT:
- return "smallint";
- case INTEGER:
- return "integer";
- case BIGINT:
- return "bigint";
- case FLOAT:
- return "real"; // PostgreSQL uses REAL for float
- case DOUBLE:
- return "double";
- case DATE:
- return "date";
- case TIME_WITHOUT_TIME_ZONE:
- return "time without time zone";
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- return "timestamp without time zone";
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- return "timestamptz";
- case INTERVAL_YEAR_MONTH:
- return "interval year to month";
- case INTERVAL_DAY_TIME:
- return "interval day to second";
- case NULL:
- return "void";
- case ARRAY:
- return getArrayScalarName(((ArrayType) type).getElementType());
- case MULTISET:
- case MAP:
- case ROW:
- case DISTINCT_TYPE:
- case STRUCTURED_TYPE:
- case RAW:
- case SYMBOL:
- case UNRESOLVED:
- default:
- throw new RuntimeException("Cannot convert type to array type");
- }
- }
-}
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
deleted file mode 100644
index 7c0eb5d..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.json.FlinkJsonType;
-import com.datasqrl.json.FlinkJsonTypeSerializer;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.table.data.RawValueData;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.postgresql.util.PGobject;
-
-public class PostgresJsonTypeSerializer
-    implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
-  public Class<?> getConversionClass() {
- return FlinkJsonType.class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
-  public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () ->
- (val) -> {
- FlinkJsonType t = (FlinkJsonType) val;
- return t.getJson();
- };
- }
-
- @Override
-  public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- FlinkJsonTypeSerializer typeSerializer = new FlinkJsonTypeSerializer();
-
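-    // Reads the RAW FlinkJsonType value from the row and writes it as a PGobject of type
-    // "json"; null values are written as SQL NULL.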
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
-          RawValueData<FlinkJsonType> object = val.getRawValue(index);
- FlinkJsonType vec = object.toObject(typeSerializer);
- if (vec == null) {
- statement.setObject(index, null);
- } else {
- pgObject.setValue(vec.getJson().toString());
- statement.setObject(index, pgObject);
- }
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
diff --git a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java b/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
deleted file mode 100644
index c86f5d0..0000000
--- a/sqrl-jdbc-1.17/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.format.SqrlRowDataToJsonConverters;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.formats.common.TimestampFormat;
-import org.apache.flink.formats.json.JsonFormatOptions.MapNullKeyMode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.types.Row;
-import org.postgresql.util.PGobject;
-
-public class PostgresRowTypeSerializer
-    implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
-  public Class<?> getConversionClass() {
- return Row[].class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
-  public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () -> {
- return (val) -> null;
- };
- }
-
- @Override
-  public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- ObjectMapper mapper = new ObjectMapper();
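-    // Converts the array-of-rows value to a JSON tree via SqrlRowDataToJsonConverters, then
-    // writes it as a PGobject of type "json"; null values are written as SQL NULL.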
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- SqrlRowDataToJsonConverters rowDataToJsonConverter =
- new SqrlRowDataToJsonConverters(TimestampFormat.SQL, MapNullKeyMode.DROP, "null");
-
- ArrayType arrayType = (ArrayType) type;
- ObjectNode objectNode = mapper.createObjectNode();
- JsonNode convert =
- rowDataToJsonConverter
- .createConverter(arrayType.getElementType())
- .convert(mapper, objectNode, val);
-
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- pgObject.setValue(convert.toString());
- statement.setObject(index, pgObject);
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
diff --git a/sqrl-jdbc-1.17/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer b/sqrl-jdbc-1.17/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
deleted file mode 100644
index 0673d25..0000000
--- a/sqrl-jdbc-1.17/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
+++ /dev/null
@@ -1,2 +0,0 @@
-com.datasqrl.type.PostgresRowTypeSerializer
-com.datasqrl.type.PostgresJsonTypeSerializer
\ No newline at end of file
diff --git a/sqrl-jdbc-1.17/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/sqrl-jdbc-1.17/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
deleted file mode 100644
index 20a59c9..0000000
--- a/sqrl-jdbc-1.17/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.jdbc.SqrlJdbcDynamicTableFactory
\ No newline at end of file
diff --git a/sqrl-jdbc-1.17/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java b/sqrl-jdbc-1.17/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
deleted file mode 100644
index 0e4c0b8..0000000
--- a/sqrl-jdbc-1.17/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.Statement;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.ResultKind;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.junit5.MiniClusterExtension;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-@ExtendWith(MiniClusterExtension.class)
-public class FlinkJdbcTest {
-
- @Test
- public void testFlinkWithPostgres() throws Exception {
- // Start PostgreSQL container
-    try (PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:14")) {
- postgres.start();
- // Establish a connection and create the PostgreSQL table
- try (Connection conn =
- DriverManager.getConnection(
- postgres.getJdbcUrl(), postgres.getUsername(), postgres.getPassword());
- Statement stmt = conn.createStatement()) {
- String createTableSQL = "CREATE TABLE test_table (" + " \"arrayOfRows\" JSONB " + ")";
- stmt.executeUpdate(createTableSQL);
- }
-
- // Set up Flink environment
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
-
- // Define the schema
- String createSourceTable =
- "CREATE TABLE datagen_source ("
-            + " arrayOfRows ARRAY<ROW<...>> "
- + ") WITH ("
- + " 'connector' = 'datagen',"
- + " 'number-of-rows' = '10'"
- + ")";
-
- String createSinkTable =
- "CREATE TABLE jdbc_sink ("
- + " arrayOfRows RAW('com.datasqrl.json.FlinkJsonType', 'ADFjb20uZGF0YXNxcmwuanNvbi5GbGlua0pzb25UeXBlU2VyaWFsaXplclNuYXBzaG90AAAAAQApY29tLmRhdGFzcXJsLmpzb24uRmxpbmtKc29uVHlwZVNlcmlhbGl6ZXI=') "
- + ") WITH ("
- + " 'connector' = 'jdbc-sqrl', "
- + " 'url' = '"
- + postgres.getJdbcUrl()
- + "', "
- + " 'table-name' = 'test_table', "
- + " 'username' = '"
- + postgres.getUsername()
- + "', "
- + " 'password' = '"
- + postgres.getPassword()
- + "'"
- + ")";
-
- // Register tables in the environment
- tableEnv.executeSql(
- "CREATE TEMPORARY FUNCTION IF NOT EXISTS `tojson` AS 'com.datasqrl.json.ToJson' LANGUAGE JAVA");
- tableEnv.executeSql(createSourceTable);
- tableEnv.executeSql(createSinkTable);
-
- // Set up a simple Flink job
- TableResult tableResult =
- tableEnv.executeSql(
- "INSERT INTO jdbc_sink SELECT tojson(arrayOfRows) AS arrayOfRows FROM datagen_source");
- tableResult.print();
-
- assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind());
- }
- }
-}
diff --git a/sqrl-jdbc-1.17/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java b/sqrl-jdbc-1.17/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java
deleted file mode 100644
index b6d4448..0000000
--- a/sqrl-jdbc-1.17/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import java.sql.Array;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.GenericRowData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.data.writer.BinaryArrayWriter;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.DoubleType;
-import org.apache.flink.table.types.logical.IntType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.TimestampType;
-import org.apache.flink.table.types.logical.VarCharType;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-class PostgresRowConverterTest {
-  private static final PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:15.4");
-
- @BeforeAll
- public static void setUp() {
- postgres.start();
- }
-
- @AfterAll
- public static void tearDown() {
- postgres.stop();
- }
-
- private ArrayType doubleArrayType = new ArrayType(new DoubleType());
- private ArrayType timestampArrayType = new ArrayType(new LocalZonedTimestampType());
- private ArrayType doubleArray2DType = new ArrayType(doubleArrayType);
- private RowType sampleRowType = RowType.of(new IntType(), new VarCharType());
-
- private void executeUpdate(Connection connection, String query) throws Exception {
- try (Statement stmt = connection.createStatement()) {
- stmt.executeUpdate(query);
- }
- }
-
- @Test
- public void testArraySerializationAndDeserialization() throws Exception {
- try (Connection connection = postgres.createConnection("")) {
- executeUpdate(
- connection,
- "CREATE TABLE test (id int, int_data int[], double_data double precision[], ts_data timestamptz[], double_data_2d double precision[][], row_data bytea)");
-
- // Set up the converter
- RowType rowType =
- RowType.of(
- new IntType(),
- new ArrayType(new IntType()),
- doubleArrayType,
- timestampArrayType,
- doubleArray2DType,
- sampleRowType);
- SqrlPostgresRowConverter converter = new SqrlPostgresRowConverter(rowType);
-
- // Sample data
- GenericRowData rowData = new GenericRowData(6);
- rowData.setField(0, 1);
-
- // Integer Array - GenericArrayData
- GenericArrayData intArray = new GenericArrayData(new int[] {1, 2, 3});
- rowData.setField(1, intArray);
-
- // Double Array - GenericArrayData
- GenericArrayData doubleArray = new GenericArrayData(new double[] {1.1, 2.2, 3.3});
- rowData.setField(2, doubleArray);
-
- // Timestamp Array - GenericArrayData
- BinaryArrayData array = new BinaryArrayData();
- BinaryArrayWriter writer = new BinaryArrayWriter(array, 2, 8);
- final int precision = 3;
- writer.reset();
- writer.writeTimestamp(0, TimestampData.fromEpochMillis(123000L), precision);
- writer.writeTimestamp(1, TimestampData.fromEpochMillis(123000L), precision);
- writer.complete();
- rowData.setField(3, array);
-
- // 2D Double Array - GenericArrayData
- GenericArrayData doubleArray2d =
- new GenericArrayData(new double[][] {{1.1, 2.2}, {3.3, 4.4}});
- rowData.setField(4, doubleArray2d);
-
- // RowType not being an array
- GenericRowData sampleRow = new GenericRowData(2);
- sampleRow.setField(0, 10);
- sampleRow.setField(1, "test");
- rowData.setField(5, sampleRow);
-
- FieldNamedPreparedStatement statement =
- SqrlFieldNamedPreparedStatementImpl.prepareStatement(
- connection,
- "INSERT INTO test (id, int_data, double_data, ts_data, double_data_2d, row_data) VALUES (:id, :int_data, :double_data, :ts_data, :double_data_2d, :row_data)",
- List.of("id", "int_data", "double_data", "ts_data", "double_data_2d", "row_data")
- .toArray(String[]::new));
-
- for (int i = 0; i < rowType.getFieldCount(); i++) {
- JdbcSerializationConverter externalConverter =
- converter.createExternalConverter(rowType.getTypeAt(i));
- externalConverter.serialize(rowData, i, statement);
- }
- statement.addBatch();
-
- int[] result = statement.executeBatch();
- assertEquals(1, result.length);
-
- Statement stmt = connection.createStatement();
- // Deserialize
- ResultSet rs =
- stmt.executeQuery(
- "SELECT int_data, double_data, ts_data, double_data_2d, row_data FROM test WHERE id=1");
- assertTrue(rs.next());
-
- // Assert Integer Array
- Array intArrayRetrieved = rs.getArray("int_data");
- Object intDataDeserialized =
- converter
- .createArrayConverter(new ArrayType(new IntType()))
- .deserialize(intArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) rowData.getField(1)).toIntArray(),
- ((GenericArrayData) intDataDeserialized).toIntArray());
-
- // Assert Double Array
- Array doubleArrayRetrieved = rs.getArray("double_data");
- Object doubleDataDeserialized =
- converter.createArrayConverter(doubleArrayType).deserialize(doubleArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) rowData.getField(2)).toDoubleArray(),
- ((GenericArrayData) doubleDataDeserialized).toDoubleArray());
-
- // Assert Timestamp Array
- Array timestampArrayRetrieved = rs.getArray("ts_data");
- Object timestampDataDeserialized =
- converter.createArrayConverter(timestampArrayType).deserialize(timestampArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) timestampDataDeserialized).toObjectArray(),
- Arrays.stream(((BinaryArrayData) rowData.getField(3)).toObjectArray(new TimestampType()))
- .toArray());
- // Assert 2D Double Array (it's a bit tricky given the 2D nature)
- Array double2DArrayRetrieved = rs.getArray("double_data_2d");
- Object double2DDataDeserialized =
- converter.createArrayConverter(doubleArray2DType).deserialize(double2DArrayRetrieved);
- // todo: 2d arrays are not well supported
- GenericArrayData field = (GenericArrayData) rowData.getField(4);
- assertNotNull(field);
-
- // todo: Row type not well supported
- Object rowRetrieved = rs.getObject("row_data");
- assertNotNull(rowRetrieved);
- }
- }
-}
diff --git a/sqrl-jdbc-1.18/pom.xml b/sqrl-jdbc-1.18/pom.xml
deleted file mode 100644
index b403f86..0000000
--- a/sqrl-jdbc-1.18/pom.xml
+++ /dev/null
@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.datasqrl.flink</groupId>
-    <artifactId>sqrl-flink-parent</artifactId>
-    <version>0.1-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>sqrl-jdbc-1.18</artifactId>
-  <name>Jdbc sink for flink 1.18</name>
-
-  <url>https://www.datasqrl.com/</url>
-
-  <licenses>
-    <license>
-      <name>The Apache Software License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-
-  <developers>
-    <developer>
-      <name>Daniel Henneberger</name>
-      <email>daniel@datasqrl.com</email>
-      <url>https://github.com/henneberger</url>
-    </developer>
-  </developers>
-
-  <scm>
-    <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
-    <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
-    <tag>HEAD</tag>
-    <url>https://github.com/DataSQRL/sqrl</url>
-  </scm>
-
-  <properties>
-    <maven.compiler.source>11</maven.compiler.source>
-    <maven.compiler.target>11</maven.compiler.target>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <flink.version>1.18.1</flink.version>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-connector-jdbc</artifactId>
-      <version>3.1.2-1.18</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.postgresql</groupId>
-      <artifactId>postgresql</artifactId>
-      <version>${postgres.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.testcontainers</groupId>
-      <artifactId>postgresql</artifactId>
-      <version>${testcontainers.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-runtime</artifactId>
-      <version>${flink.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-common</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-lib-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-flexible-json</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-csv</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-json</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-planner_2.12</artifactId>
-      <version>${flink.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-test-utils</artifactId>
-      <version>${flink.version}</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-</project>
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
deleted file mode 100644
index 6c76f5e..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static com.datasqrl.type.FlinkArrayTypeUtil.getBaseFlinkArrayType;
-import static com.datasqrl.type.FlinkArrayTypeUtil.isScalarArray;
-import static com.datasqrl.type.PostgresArrayTypeConverter.getArrayScalarName;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.MAP;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.ROW;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
-
-import java.sql.Array;
-import java.sql.PreparedStatement;
-import java.sql.Timestamp;
-import java.sql.Types;
-import java.time.LocalDateTime;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.ArrayData;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-/** A sqrl class to handle arrays and extra data types */
-public abstract class SqrlBaseJdbcRowConverter extends AbstractJdbcRowConverter {
-
- public SqrlBaseJdbcRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (type.getTypeRoot() == TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- int timestampWithTimezone = Types.TIMESTAMP_WITH_TIMEZONE;
- return (val, index, statement) -> {
- if (val == null || val.isNullAt(index) || LogicalTypeRoot.NULL.equals(type.getTypeRoot())) {
- statement.setNull(index, timestampWithTimezone);
- } else {
- jdbcSerializationConverter.serialize(val, index, statement);
- }
- };
- } else if (type.getTypeRoot() == ROW) {
- return (val, index, statement) -> setRow(type, val, index, statement);
- } else if (type.getTypeRoot() == MAP) {
- return (val, index, statement) -> setRow(type, val, index, statement);
- }
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- LogicalTypeRoot root = type.getTypeRoot();
-
- if (root == LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- return val ->
- val instanceof LocalDateTime
- ? TimestampData.fromLocalDateTime((LocalDateTime) val)
- : TimestampData.fromTimestamp((Timestamp) val);
- } else if (root == LogicalTypeRoot.ARRAY) {
- ArrayType arrayType = (ArrayType) type;
- return createArrayConverter(arrayType);
- } else if (root == LogicalTypeRoot.ROW) {
- return val -> val;
- } else if (root == LogicalTypeRoot.MAP) {
- return val -> val;
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- switch (type.getTypeRoot()) {
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- final int tsPrecision = ((LocalZonedTimestampType) type).getPrecision();
- return (val, index, statement) ->
- statement.setTimestamp(index, val.getTimestamp(index, tsPrecision).toTimestamp());
- case ARRAY:
- return (val, index, statement) -> setArray(type, val, index, statement);
- case ROW:
- return (val, index, statement) -> setRow(type, val, index, statement);
- case MAP:
- return (val, index, statement) -> setRow(type, val, index, statement);
- case MULTISET:
- case RAW:
- default:
- return super.createExternalConverter(type);
- }
- }
-
- public abstract void setRow(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement);
-
- @SneakyThrows
- public void setArray(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement) {
- SqrlFieldNamedPreparedStatementImpl flinkPreparedStatement =
- (SqrlFieldNamedPreparedStatementImpl) statement;
- for (int idx : flinkPreparedStatement.getIndexMapping()[index]) {
- ArrayData arrayData = val.getArray(index);
- createSqlArrayObject(type, arrayData, idx, flinkPreparedStatement.getStatement());
- }
- }
-
- @SneakyThrows
- private void createSqlArrayObject(
- LogicalType type, ArrayData data, int idx, PreparedStatement statement) {
- // Scalar arrays of any dimension are one array call
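-    // e.g. an ARRAY<DOUBLE> value becomes connection.createArrayOf("double", boxedElements).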
- if (isScalarArray(type)) {
- Object[] boxed;
- if (data instanceof GenericArrayData) {
- boxed = ((GenericArrayData) data).toObjectArray();
- } else if (data instanceof BinaryArrayData) {
- boxed = ((BinaryArrayData) data).toObjectArray(getBaseFlinkArrayType(type));
- } else {
- throw new RuntimeException("Unsupported ArrayData type: " + data.getClass());
- }
- Array array = statement.getConnection().createArrayOf(getArrayScalarName(type), boxed);
- statement.setArray(idx, array);
- } else {
- // If it is not a scalar array (e.g. row type), use an empty byte array.
- Array array = statement.getConnection().createArrayOf(getArrayType(), new Byte[0]);
- statement.setArray(idx, array);
- }
- }
-
- protected abstract String getArrayType();
-
- public abstract JdbcDeserializationConverter createArrayConverter(ArrayType arrayType);
-}
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java
deleted file mode 100644
index ea13e8e..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlFieldNamedPreparedStatementImpl.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.util.Preconditions.checkArgument;
-import static org.apache.flink.util.Preconditions.checkNotNull;
-
-import java.math.BigDecimal;
-import java.sql.Connection;
-import java.sql.Date;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Time;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-
-/** SQRL: added getStatement */
-
-/** Simple implementation of {@link FieldNamedPreparedStatement}. */
-public class SqrlFieldNamedPreparedStatementImpl implements FieldNamedPreparedStatement {
-
- private final PreparedStatement statement;
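-  // Maps each logical field index to the 1-based JDBC parameter positions it binds to.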
- private final int[][] indexMapping;
-
- private SqrlFieldNamedPreparedStatementImpl(PreparedStatement statement, int[][] indexMapping) {
- this.statement = statement;
- this.indexMapping = indexMapping;
- }
-
- public PreparedStatement getStatement() {
- return statement;
- }
-
- public int[][] getIndexMapping() {
- return indexMapping;
- }
-
- @Override
- public void clearParameters() throws SQLException {
- statement.clearParameters();
- }
-
- @Override
- public ResultSet executeQuery() throws SQLException {
- return statement.executeQuery();
- }
-
- @Override
- public void addBatch() throws SQLException {
- statement.addBatch();
- }
-
- @Override
- public int[] executeBatch() throws SQLException {
- return statement.executeBatch();
- }
-
- @Override
- public void setNull(int fieldIndex, int sqlType) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setNull(index, sqlType);
- }
- }
-
- @Override
- public void setBoolean(int fieldIndex, boolean x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBoolean(index, x);
- }
- }
-
- @Override
- public void setByte(int fieldIndex, byte x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setByte(index, x);
- }
- }
-
- @Override
- public void setShort(int fieldIndex, short x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setShort(index, x);
- }
- }
-
- @Override
- public void setInt(int fieldIndex, int x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setInt(index, x);
- }
- }
-
- @Override
- public void setLong(int fieldIndex, long x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setLong(index, x);
- }
- }
-
- @Override
- public void setFloat(int fieldIndex, float x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setFloat(index, x);
- }
- }
-
- @Override
- public void setDouble(int fieldIndex, double x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setDouble(index, x);
- }
- }
-
- @Override
- public void setBigDecimal(int fieldIndex, BigDecimal x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBigDecimal(index, x);
- }
- }
-
- @Override
- public void setString(int fieldIndex, String x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setString(index, x);
- }
- }
-
- @Override
- public void setBytes(int fieldIndex, byte[] x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setBytes(index, x);
- }
- }
-
- @Override
- public void setDate(int fieldIndex, Date x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setDate(index, x);
- }
- }
-
- @Override
- public void setTime(int fieldIndex, Time x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setTime(index, x);
- }
- }
-
- @Override
- public void setTimestamp(int fieldIndex, Timestamp x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setTimestamp(index, x);
- }
- }
-
- @Override
- public void setObject(int fieldIndex, Object x) throws SQLException {
- for (int index : indexMapping[fieldIndex]) {
- statement.setObject(index, x);
- }
- }
-
- @Override
- public void close() throws SQLException {
- statement.close();
- }
-
- // ----------------------------------------------------------------------------------------
-
- public static FieldNamedPreparedStatement prepareStatement(
- Connection connection, String sql, String[] fieldNames) throws SQLException {
- checkNotNull(connection, "connection must not be null.");
- checkNotNull(sql, "sql must not be null.");
- checkNotNull(fieldNames, "fieldNames must not be null.");
-
- if (sql.contains("?")) {
- throw new IllegalArgumentException("SQL statement must not contain ? character.");
- }
-
-    HashMap<String, List<Integer>> parameterMap = new HashMap<>();
- String parsedSQL = parseNamedStatement(sql, parameterMap);
- // currently, the statements must contain all the field parameters
- checkArgument(parameterMap.size() == fieldNames.length);
- int[][] indexMapping = new int[fieldNames.length][];
- for (int i = 0; i < fieldNames.length; i++) {
- String fieldName = fieldNames[i];
- checkArgument(
- parameterMap.containsKey(fieldName),
- fieldName + " doesn't exist in the parameters of SQL statement: " + sql);
- indexMapping[i] = parameterMap.get(fieldName).stream().mapToInt(v -> v).toArray();
- }
-
- return new SqrlFieldNamedPreparedStatementImpl(
- connection.prepareStatement(parsedSQL), indexMapping);
- }
-
- /**
- * Parses a sql with named parameters. The parameter-index mappings are put into the map, and the
- * parsed sql is returned.
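-   *
-   * For example, "INSERT INTO t (a, b) VALUES (:a, :b)" parses to
-   * "INSERT INTO t (a, b) VALUES (?, ?)" and paramMap becomes {a=[1], b=[2]}.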
- *
- * @param sql sql to parse
- * @param paramMap map to hold parameter-index mappings
- * @return the parsed sql
- */
-  public static String parseNamedStatement(String sql, Map<String, List<Integer>> paramMap) {
- StringBuilder parsedSql = new StringBuilder();
- int fieldIndex = 1; // SQL statement parameter index starts from 1
- int length = sql.length();
- for (int i = 0; i < length; i++) {
- char c = sql.charAt(i);
- if (':' == c) {
- int j = i + 1;
- while (j < length && Character.isJavaIdentifierPart(sql.charAt(j))) {
- j++;
- }
- String parameterName = sql.substring(i + 1, j);
- checkArgument(
- !parameterName.isEmpty(), "Named parameters in SQL statement must not be empty.");
- paramMap.computeIfAbsent(parameterName, n -> new ArrayList<>()).add(fieldIndex);
- fieldIndex++;
- i = j - 1;
- parsedSql.append('?');
- } else {
- parsedSql.append(c);
- }
- }
- return parsedSql.toString();
- }
-}
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
deleted file mode 100644
index 05a8c6f..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.DRIVER;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MISSING_KEY;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_TTL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.MAX_RETRY_TIMEOUT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.PASSWORD;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_AUTO_COMMIT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_FETCH_SIZE;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_COLUMN;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_LOWER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_NUM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_UPPER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_PARALLELISM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.TABLE_NAME;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.URL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.USERNAME;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.configuration.ConfigOption;
-import org.apache.flink.configuration.ConfigOptions;
-import org.apache.flink.configuration.ReadableConfig;
-import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectLoader;
-import org.apache.flink.connector.jdbc.internal.options.InternalJdbcConnectionOptions;
-import org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSink;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSource;
-import org.apache.flink.table.connector.sink.DynamicTableSink;
-import org.apache.flink.table.connector.source.lookup.LookupOptions;
-import org.apache.flink.table.factories.DynamicTableSinkFactory;
-import org.apache.flink.table.factories.FactoryUtil;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.util.Preconditions;
-
-/**
- * Factory for creating configured instances of {@link JdbcDynamicTableSource} and {@link
- * JdbcDynamicTableSink}.
- */
-@Internal
-public class SqrlJdbcDynamicTableFactory implements DynamicTableSinkFactory {
-
- public static final String IDENTIFIER = "jdbc-sqrl";
-
- @Override
- public DynamicTableSink createDynamicTableSink(Context context) {
- final FactoryUtil.TableFactoryHelper helper =
- FactoryUtil.createTableFactoryHelper(this, context);
- final ReadableConfig config = helper.getOptions();
-
- helper.validate();
- validateConfigOptions(config, context.getClassLoader());
- validateDataTypeWithJdbcDialect(
- context.getPhysicalRowDataType(), config.get(URL), context.getClassLoader());
- InternalJdbcConnectionOptions jdbcOptions = getJdbcOptions(config, context.getClassLoader());
-
- return new JdbcDynamicTableSink(
- jdbcOptions,
- getJdbcExecutionOptions(config),
- getJdbcDmlOptions(
- jdbcOptions, context.getPhysicalRowDataType(), context.getPrimaryKeyIndexes()),
- context.getPhysicalRowDataType());
- }
-
- private static void validateDataTypeWithJdbcDialect(
- DataType dataType, String url, ClassLoader classLoader) {
- JdbcDialect dialect = loadDialect(url, classLoader);
-
- dialect.validate((RowType) dataType.getLogicalType());
- }
-
- private InternalJdbcConnectionOptions getJdbcOptions(
- ReadableConfig readableConfig, ClassLoader classLoader) {
- final String url = readableConfig.get(URL);
- final InternalJdbcConnectionOptions.Builder builder =
- InternalJdbcConnectionOptions.builder()
- .setClassLoader(classLoader)
- .setDBUrl(url)
- .setTableName(readableConfig.get(TABLE_NAME))
- .setDialect(loadDialect(url, classLoader))
- .setParallelism(readableConfig.getOptional(SINK_PARALLELISM).orElse(null))
- .setConnectionCheckTimeoutSeconds(
- (int) readableConfig.get(MAX_RETRY_TIMEOUT).getSeconds());
-
- readableConfig.getOptional(DRIVER).ifPresent(builder::setDriverName);
- readableConfig.getOptional(USERNAME).ifPresent(builder::setUsername);
- readableConfig.getOptional(PASSWORD).ifPresent(builder::setPassword);
- return builder.build();
- }
-
- private static JdbcDialect loadDialect(String url, ClassLoader classLoader) {
- JdbcDialect dialect = JdbcDialectLoader.load(url, classLoader);
-    // sqrl: replace the standard Postgres dialect with the extended SQRL dialect
- if (dialect.dialectName().equalsIgnoreCase("PostgreSQL")) {
- return new SqrlPostgresDialect();
- }
- return dialect;
- }
-
- private JdbcExecutionOptions getJdbcExecutionOptions(ReadableConfig config) {
- final JdbcExecutionOptions.Builder builder = new JdbcExecutionOptions.Builder();
- builder.withBatchSize(config.get(SINK_BUFFER_FLUSH_MAX_ROWS));
- builder.withBatchIntervalMs(config.get(SINK_BUFFER_FLUSH_INTERVAL).toMillis());
- builder.withMaxRetries(config.get(SINK_MAX_RETRIES));
- return builder.build();
- }
-
- private JdbcDmlOptions getJdbcDmlOptions(
- InternalJdbcConnectionOptions jdbcOptions, DataType dataType, int[] primaryKeyIndexes) {
-
- String[] keyFields =
- Arrays.stream(primaryKeyIndexes)
- .mapToObj(i -> DataType.getFieldNames(dataType).get(i))
- .toArray(String[]::new);
-
- return JdbcDmlOptions.builder()
- .withTableName(jdbcOptions.getTableName())
- .withDialect(jdbcOptions.getDialect())
- .withFieldNames(DataType.getFieldNames(dataType).toArray(new String[0]))
- .withKeyFields(keyFields.length > 0 ? keyFields : null)
- .build();
- }
-
- @Override
- public String factoryIdentifier() {
- return IDENTIFIER;
- }
-
- @Override
-  public Set<ConfigOption<?>> requiredOptions() {
-    Set<ConfigOption<?>> requiredOptions = new HashSet<>();
- requiredOptions.add(URL);
- requiredOptions.add(TABLE_NAME);
- return requiredOptions;
- }
-
- @Override
-  public Set<ConfigOption<?>> optionalOptions() {
-    Set<ConfigOption<?>> optionalOptions = new HashSet<>();
- optionalOptions.add(DRIVER);
- optionalOptions.add(USERNAME);
- optionalOptions.add(PASSWORD);
- optionalOptions.add(SCAN_PARTITION_COLUMN);
- optionalOptions.add(SCAN_PARTITION_LOWER_BOUND);
- optionalOptions.add(SCAN_PARTITION_UPPER_BOUND);
- optionalOptions.add(SCAN_PARTITION_NUM);
- optionalOptions.add(SCAN_FETCH_SIZE);
- optionalOptions.add(SCAN_AUTO_COMMIT);
- optionalOptions.add(LOOKUP_CACHE_MAX_ROWS);
- optionalOptions.add(LOOKUP_CACHE_TTL);
- optionalOptions.add(LOOKUP_MAX_RETRIES);
- optionalOptions.add(LOOKUP_CACHE_MISSING_KEY);
- optionalOptions.add(SINK_BUFFER_FLUSH_MAX_ROWS);
- optionalOptions.add(SINK_BUFFER_FLUSH_INTERVAL);
- optionalOptions.add(SINK_MAX_RETRIES);
- optionalOptions.add(SINK_PARALLELISM);
- optionalOptions.add(MAX_RETRY_TIMEOUT);
- optionalOptions.add(LookupOptions.CACHE_TYPE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_ACCESS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_WRITE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_MAX_ROWS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_CACHE_MISSING_KEY);
- optionalOptions.add(LookupOptions.MAX_RETRIES);
- return optionalOptions;
- }
-
- @Override
-  public Set<ConfigOption<?>> forwardOptions() {
- return Stream.of(
- URL,
- TABLE_NAME,
- USERNAME,
- PASSWORD,
- DRIVER,
- SINK_BUFFER_FLUSH_MAX_ROWS,
- SINK_BUFFER_FLUSH_INTERVAL,
- SINK_MAX_RETRIES,
- MAX_RETRY_TIMEOUT,
- SCAN_FETCH_SIZE,
- SCAN_AUTO_COMMIT)
- .collect(Collectors.toSet());
- }
-
- private void validateConfigOptions(ReadableConfig config, ClassLoader classLoader) {
- String jdbcUrl = config.get(URL);
- // JdbcDialectLoader.load(jdbcUrl, classLoader);
-
- checkAllOrNone(config, new ConfigOption[] {USERNAME, PASSWORD});
-
- checkAllOrNone(
- config,
- new ConfigOption[] {
- SCAN_PARTITION_COLUMN,
- SCAN_PARTITION_NUM,
- SCAN_PARTITION_LOWER_BOUND,
- SCAN_PARTITION_UPPER_BOUND
- });
-
- if (config.getOptional(SCAN_PARTITION_LOWER_BOUND).isPresent()
- && config.getOptional(SCAN_PARTITION_UPPER_BOUND).isPresent()) {
- long lowerBound = config.get(SCAN_PARTITION_LOWER_BOUND);
- long upperBound = config.get(SCAN_PARTITION_UPPER_BOUND);
- if (lowerBound > upperBound) {
- throw new IllegalArgumentException(
- String.format(
- "'%s'='%s' must not be larger than '%s'='%s'.",
- SCAN_PARTITION_LOWER_BOUND.key(),
- lowerBound,
- SCAN_PARTITION_UPPER_BOUND.key(),
- upperBound));
- }
- }
-
- checkAllOrNone(config, new ConfigOption[] {LOOKUP_CACHE_MAX_ROWS, LOOKUP_CACHE_TTL});
-
- if (config.get(LOOKUP_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- LOOKUP_MAX_RETRIES.key(), config.get(LOOKUP_MAX_RETRIES)));
- }
-
- if (config.get(SINK_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- SINK_MAX_RETRIES.key(), config.get(SINK_MAX_RETRIES)));
- }
-
- if (config.get(MAX_RETRY_TIMEOUT).getSeconds() <= 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option must be in second granularity and shouldn't be smaller than 1 second, but is %s.",
- MAX_RETRY_TIMEOUT.key(),
- config.get(
- ConfigOptions.key(MAX_RETRY_TIMEOUT.key()).stringType().noDefaultValue())));
- }
- }
-
-  private void checkAllOrNone(ReadableConfig config, ConfigOption<?>[] configOptions) {
-    int presentCount = 0;
-    for (ConfigOption<?> configOption : configOptions) {
- if (config.getOptional(configOption).isPresent()) {
- presentCount++;
- }
- }
- String[] propertyNames =
- Arrays.stream(configOptions).map(ConfigOption::key).toArray(String[]::new);
- Preconditions.checkArgument(
- configOptions.length == presentCount || presentCount == 0,
- "Either all or none of the following options should be provided:\n"
- + String.join("\n", propertyNames));
- }
-}
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
deleted file mode 100644
index 4dae46a..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.flink.connector.jdbc.converter.JdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.RowType.RowField;
-
-/**
- * JDBC dialect for PostgreSQL.
- *
- * SQRL: Add quoting to identifiers
- */
-public class SqrlPostgresDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-datetime.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL
- private static final int MAX_DECIMAL_PRECISION = 1000;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public JdbcRowConverter getRowConverter(RowType rowType) {
- return new SqrlPostgresRowConverter(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
-  public Optional<String> defaultDriverName() {
- return Optional.of("org.postgresql.Driver");
- }
-
-  /** Postgres upsert statement: uses ON CONFLICT ... DO UPDATE SET ... to emulate an upsert in Postgres. */
- @Override
-  public Optional<String> getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
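-    // e.g. for fields (id, val) with unique key (id) this produces roughly:
-    //   INSERT INTO "t"("id", "val") VALUES (:id, :val)
-    //   ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "val"=EXCLUDED."val"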
- String uniqueColumns =
- Arrays.stream(uniqueKeyFields).map(this::quoteIdentifier).collect(Collectors.joining(", "));
- String updateClause =
- Arrays.stream(fieldNames)
- .map(f -> quoteIdentifier(f) + "=EXCLUDED." + quoteIdentifier(f))
- .collect(Collectors.joining(", "));
- return Optional.of(
- getInsertIntoStatement(tableName, fieldNames)
- + " ON CONFLICT ("
- + uniqueColumns
- + ")"
- + " DO UPDATE SET "
- + updateClause);
- }
-
- @Override
- public void validate(RowType rowType) throws ValidationException {
-    List<LogicalType> unsupportedTypes =
- rowType.getFields().stream()
- .map(RowField::getType)
- .filter(type -> LogicalTypeRoot.RAW.equals(type.getTypeRoot()))
- .filter(type -> !isSupportedType(type))
- .collect(Collectors.toList());
-
- if (!unsupportedTypes.isEmpty()) {
- throw new ValidationException(
- String.format(
- "The %s dialect doesn't support type: %s.", this.dialectName(), unsupportedTypes));
- }
-
- super.validate(rowType);
- }
-
- private boolean isSupportedType(LogicalType type) {
- return SqrlPostgresRowConverter.sqrlSerializers.containsKey(type.getDefaultConversion());
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "\"" + identifier + "\"";
- }
-
- @Override
- public String dialectName() {
- return "PostgreSQL";
- }
-
- @Override
-  public Optional<Range> decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
-  public Optional<Range> timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Set<LogicalTypeRoot> supportedTypes() {
- // The data types used in PostgreSQL are listed at:
- // https://www.postgresql.org/docs/12/datatype.html
-
- // TODO: We can't convert BINARY data type to
- // PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO in
- // LegacyTypeInfoDataTypeConverter.
-
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
- LogicalTypeRoot.ARRAY,
- LogicalTypeRoot.MAP,
- LogicalTypeRoot.RAW // see validate() for supported structured types
- );
- }
-}
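Note: getUpsertStatement above composes AbstractDialect's getInsertIntoStatement (which renders
named :field placeholders for FieldNamedPreparedStatement) with a PostgreSQL ON CONFLICT clause.
A sketch of the generated statement, with hypothetical table and column names:

    // getUpsertStatement("orders", new String[] {"id", "total"}, new String[] {"id"})
    // returns roughly:
    //   INSERT INTO "orders"("id", "total") VALUES (:id, :total)
    //   ON CONFLICT ("id") DO UPDATE SET "id"=EXCLUDED."id", "total"=EXCLUDED."total"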
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
deleted file mode 100644
index 601feff..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import com.datasqrl.type.JdbcTypeSerializer;
-import java.lang.reflect.Type;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.stream.Collectors;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.RowData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
-import org.postgresql.jdbc.PgArray;
-
-/**
- * Runtime converter responsible for converting between JDBC objects and Flink internal objects
- * for PostgreSQL.
- *
- * SQRL: Add array support
- */
-public class SqrlPostgresRowConverter extends SqrlBaseJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- public static final Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- sqrlSerializers = discoverSerializers();
-
- private static Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- discoverSerializers() {
- return ServiceLoader.load(JdbcTypeSerializer.class).stream()
- .map(f -> f.get())
- .filter(f -> f.getDialectId().equalsIgnoreCase("postgres"))
- .collect(Collectors.toMap(JdbcTypeSerializer::getConversionClass, t -> t));
- }
-
- @Override
- public String converterName() {
- return "PostgreSQL";
- }
-
- public SqrlPostgresRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @SneakyThrows
- public void setRow(
- LogicalType type, RowData val, int index, FieldNamedPreparedStatement statement) {
- SqrlFieldNamedPreparedStatementImpl flinkPreparedStatement =
- (SqrlFieldNamedPreparedStatementImpl) statement;
- for (int idx : flinkPreparedStatement.getIndexMapping()[index]) {
- // RowData row = val.getRow(index, ((RowType) type).getFieldCount());
- // java.sql.Array sqlArray = flinkPreparedStatement.getStatement()
- // .getConnection().createArrayOf("bytea", );
- flinkPreparedStatement.getStatement().setBytes(idx, new byte[0]);
- }
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getDeserializerConverter().create();
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return jdbcSerializationConverter::serialize;
- } else {
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getSerializerConverter(type).create();
- } else {
- return super.createExternalConverter(type);
- }
- }
-
- @Override
- protected String getArrayType() {
- return "bytea";
- }
-
- @Override
- public JdbcDeserializationConverter createArrayConverter(ArrayType arrayType) {
- // Since PGJDBC 42.2.15 (https://github.com/pgjdbc/pgjdbc/pull/1194) bytea[] is wrapped in
- // primitive byte arrays
- final Class<?> elementClass =
- LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
- final JdbcDeserializationConverter elementConverter =
- createNullableInternalConverter(arrayType.getElementType());
- return val -> {
- // sqrl: check if scalar array
-
- Object[] in;
- if (val instanceof PgArray) {
- PgArray pgArray = (PgArray) val;
- in = (Object[]) pgArray.getArray();
- } else {
- in = (Object[]) val;
- }
- final Object[] array =
- (Object[]) java.lang.reflect.Array.newInstance(elementClass, in.length);
- for (int i = 0; i < in.length; i++) {
- array[i] = elementConverter.deserialize(in[i]);
- }
- return new GenericArrayData(array);
- };
- }
-}
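Note: sqrlSerializers is keyed by each serializer's conversion class, so the create*Converter
overrides above reduce to a map lookup before falling back to the stock Flink converters. A
minimal sketch of that lookup (illustrative only; it assumes the FlinkJsonType registration
declared later in this diff via META-INF/services):

    JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> serializer =
        SqrlPostgresRowConverter.sqrlSerializers.get(FlinkJsonType.class);
    if (serializer != null) {
      // construction is deferred through create(), mirroring createInternalConverter above
      JdbcDeserializationConverter fromJdbc = serializer.getDeserializerConverter().create();
    }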
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
deleted file mode 100644
index 547c983..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class FlinkArrayTypeUtil {
-
- public static LogicalType getBaseFlinkArrayType(LogicalType type) {
- if (type instanceof ArrayType) {
- return getBaseFlinkArrayType(((ArrayType) type).getElementType());
- }
- return type;
- }
-
- public static boolean isScalarArray(LogicalType type) {
- if (type instanceof ArrayType) {
- LogicalType elementType = ((ArrayType) type).getElementType();
- return isScalar(elementType) || isScalarArray(elementType);
- }
- return false;
- }
-
- public static boolean isScalar(LogicalType type) {
- switch (type.getTypeRoot()) {
- case BOOLEAN:
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- case FLOAT:
- case DOUBLE:
- case CHAR:
- case VARCHAR:
- case BINARY:
- case VARBINARY:
- case DATE:
- case TIME_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- case DECIMAL:
- return true;
- default:
- return false;
- }
- }
-}
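Note: these two helpers recurse together and drive the scalar-vs-structured branch in
SqrlBaseJdbcRowConverter.createSqlArrayObject (see the 1.19 copy later in this diff). For
example, with the Flink logical types from org.apache.flink.table.types.logical:

    FlinkArrayTypeUtil.isScalarArray(new ArrayType(new ArrayType(new DoubleType())));        // true
    FlinkArrayTypeUtil.getBaseFlinkArrayType(new ArrayType(new ArrayType(new DoubleType()))); // DoubleType
    FlinkArrayTypeUtil.isScalarArray(new ArrayType(RowType.of(new IntType())));              // false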
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
deleted file mode 100644
index 0a726ee..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.LogicalType;
-
-public interface JdbcTypeSerializer<D, S> {
-
- String getDialectId();
-
- Class getConversionClass();
-
- String dialectTypeName();
-
- GenericDeserializationConverter<D> getDeserializerConverter();
-
- GenericSerializationConverter<S> getSerializerConverter(LogicalType type);
-
- interface GenericSerializationConverter<T> {
- T create();
- }
-
- interface GenericDeserializationConverter<T> {
- T create();
- }
-}
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
deleted file mode 100644
index 96ee9f9..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class PostgresArrayTypeConverter {
-
- /** Returns the PostgreSQL scalar type name for the base element of the given Flink type. */
- public static String getArrayScalarName(LogicalType type) {
- switch (type.getTypeRoot()) {
- case CHAR:
- case VARCHAR:
- return "text";
- case BOOLEAN:
- return "boolean";
- case BINARY:
- case VARBINARY:
- return "bytea";
- case DECIMAL:
- return "decimal";
- case TINYINT:
- return "smallint";
- case SMALLINT:
- return "smallint";
- case INTEGER:
- return "integer";
- case BIGINT:
- return "bigint";
- case FLOAT:
- return "real"; // PostgreSQL uses REAL for float
- case DOUBLE:
- return "double";
- case DATE:
- return "date";
- case TIME_WITHOUT_TIME_ZONE:
- return "time without time zone";
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- return "timestamp without time zone";
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- return "timestamptz";
- case INTERVAL_YEAR_MONTH:
- return "interval year to month";
- case INTERVAL_DAY_TIME:
- return "interval day to second";
- case NULL:
- return "void";
- case ARRAY:
- return getArrayScalarName(((ArrayType) type).getElementType());
- case MULTISET:
- case MAP:
- case ROW:
- case DISTINCT_TYPE:
- case STRUCTURED_TYPE:
- case RAW:
- case SYMBOL:
- case UNRESOLVED:
- default:
- throw new RuntimeException("Cannot convert type to array type");
- }
- }
-}
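Note: the mapping recurses through nested arrays, so an N-dimensional Flink array resolves to
its element's PostgreSQL scalar name, which the converter then hands to
Connection.createArrayOf. For example:

    PostgresArrayTypeConverter.getArrayScalarName(new ArrayType(new ArrayType(new IntType())));  // "integer"
    PostgresArrayTypeConverter.getArrayScalarName(new VarCharType());                            // "text"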
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
deleted file mode 100644
index 7c0eb5d..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.json.FlinkJsonType;
-import com.datasqrl.json.FlinkJsonTypeSerializer;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.table.data.RawValueData;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.postgresql.util.PGobject;
-
-public class PostgresJsonTypeSerializer
- implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class getConversionClass() {
- return FlinkJsonType.class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
- public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () ->
- (val) -> {
- FlinkJsonType t = (FlinkJsonType) val;
- return t.getJson();
- };
- }
-
- @Override
- public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- FlinkJsonTypeSerializer typeSerializer = new FlinkJsonTypeSerializer();
-
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- RawValueData<FlinkJsonType> object = val.getRawValue(index);
- FlinkJsonType vec = object.toObject(typeSerializer);
- if (vec == null) {
- statement.setObject(index, null);
- } else {
- pgObject.setValue(vec.getJson().toString());
- statement.setObject(index, pgObject);
- }
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
diff --git a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java b/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
deleted file mode 100644
index c86f5d0..0000000
--- a/sqrl-jdbc-1.18/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.format.SqrlRowDataToJsonConverters;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.formats.common.TimestampFormat;
-import org.apache.flink.formats.json.JsonFormatOptions.MapNullKeyMode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.types.Row;
-import org.postgresql.util.PGobject;
-
-public class PostgresRowTypeSerializer
- implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class getConversionClass() {
- return Row[].class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
- public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () -> {
- return (val) -> null;
- };
- }
-
- @Override
- public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- ObjectMapper mapper = new ObjectMapper();
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- SqrlRowDataToJsonConverters rowDataToJsonConverter =
- new SqrlRowDataToJsonConverters(TimestampFormat.SQL, MapNullKeyMode.DROP, "null");
-
- ArrayType arrayType = (ArrayType) type;
- ObjectNode objectNode = mapper.createObjectNode();
- JsonNode convert =
- rowDataToJsonConverter
- .createConverter(arrayType.getElementType())
- .convert(mapper, objectNode, val);
-
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- pgObject.setValue(convert.toString());
- statement.setObject(index, pgObject);
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
diff --git a/sqrl-jdbc-1.18/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer b/sqrl-jdbc-1.18/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
deleted file mode 100644
index 0673d25..0000000
--- a/sqrl-jdbc-1.18/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
+++ /dev/null
@@ -1,2 +0,0 @@
-com.datasqrl.type.PostgresRowTypeSerializer
-com.datasqrl.type.PostgresJsonTypeSerializer
\ No newline at end of file
diff --git a/sqrl-jdbc-1.18/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/sqrl-jdbc-1.18/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
deleted file mode 100644
index 20a59c9..0000000
--- a/sqrl-jdbc-1.18/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.jdbc.SqrlJdbcDynamicTableFactory
\ No newline at end of file
diff --git a/sqrl-jdbc-1.18/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java b/sqrl-jdbc-1.18/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
deleted file mode 100644
index 0e4c0b8..0000000
--- a/sqrl-jdbc-1.18/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.Statement;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.ResultKind;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.junit5.MiniClusterExtension;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-@ExtendWith(MiniClusterExtension.class)
-public class FlinkJdbcTest {
-
- @Test
- public void testFlinkWithPostgres() throws Exception {
- // Start PostgreSQL container
- try (PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:14")) {
- postgres.start();
- // Establish a connection and create the PostgreSQL table
- try (Connection conn =
- DriverManager.getConnection(
- postgres.getJdbcUrl(), postgres.getUsername(), postgres.getPassword());
- Statement stmt = conn.createStatement()) {
- String createTableSQL = "CREATE TABLE test_table (" + " \"arrayOfRows\" JSONB " + ")";
- stmt.executeUpdate(createTableSQL);
- }
-
- // Set up Flink environment
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
-
- // Define the schema
- String createSourceTable =
- "CREATE TABLE datagen_source ("
- + " arrayOfRows ARRAY> "
- + ") WITH ("
- + " 'connector' = 'datagen',"
- + " 'number-of-rows' = '10'"
- + ")";
-
- String createSinkTable =
- "CREATE TABLE jdbc_sink ("
- + " arrayOfRows RAW('com.datasqrl.json.FlinkJsonType', 'ADFjb20uZGF0YXNxcmwuanNvbi5GbGlua0pzb25UeXBlU2VyaWFsaXplclNuYXBzaG90AAAAAQApY29tLmRhdGFzcXJsLmpzb24uRmxpbmtKc29uVHlwZVNlcmlhbGl6ZXI=') "
- + ") WITH ("
- + " 'connector' = 'jdbc-sqrl', "
- + " 'url' = '"
- + postgres.getJdbcUrl()
- + "', "
- + " 'table-name' = 'test_table', "
- + " 'username' = '"
- + postgres.getUsername()
- + "', "
- + " 'password' = '"
- + postgres.getPassword()
- + "'"
- + ")";
-
- // Register tables in the environment
- tableEnv.executeSql(
- "CREATE TEMPORARY FUNCTION IF NOT EXISTS `tojson` AS 'com.datasqrl.json.ToJson' LANGUAGE JAVA");
- tableEnv.executeSql(createSourceTable);
- tableEnv.executeSql(createSinkTable);
-
- // Set up a simple Flink job
- TableResult tableResult =
- tableEnv.executeSql(
- "INSERT INTO jdbc_sink SELECT tojson(arrayOfRows) AS arrayOfRows FROM datagen_source");
- tableResult.print();
-
- assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind());
- }
- }
-}
diff --git a/sqrl-jdbc-1.18/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java b/sqrl-jdbc-1.18/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java
deleted file mode 100644
index b6d4448..0000000
--- a/sqrl-jdbc-1.18/src/test/java/com/datasqrl/jdbc/PostgresRowConverterTest.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import java.sql.Array;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.GenericRowData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.data.writer.BinaryArrayWriter;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.DoubleType;
-import org.apache.flink.table.types.logical.IntType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.TimestampType;
-import org.apache.flink.table.types.logical.VarCharType;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-class PostgresRowConverterTest {
- private static final PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:15.4");
-
- @BeforeAll
- public static void setUp() {
- postgres.start();
- }
-
- @AfterAll
- public static void tearDown() {
- postgres.stop();
- }
-
- private ArrayType doubleArrayType = new ArrayType(new DoubleType());
- private ArrayType timestampArrayType = new ArrayType(new LocalZonedTimestampType());
- private ArrayType doubleArray2DType = new ArrayType(doubleArrayType);
- private RowType sampleRowType = RowType.of(new IntType(), new VarCharType());
-
- private void executeUpdate(Connection connection, String query) throws Exception {
- try (Statement stmt = connection.createStatement()) {
- stmt.executeUpdate(query);
- }
- }
-
- @Test
- public void testArraySerializationAndDeserialization() throws Exception {
- try (Connection connection = postgres.createConnection("")) {
- executeUpdate(
- connection,
- "CREATE TABLE test (id int, int_data int[], double_data double precision[], ts_data timestamptz[], double_data_2d double precision[][], row_data bytea)");
-
- // Set up the converter
- RowType rowType =
- RowType.of(
- new IntType(),
- new ArrayType(new IntType()),
- doubleArrayType,
- timestampArrayType,
- doubleArray2DType,
- sampleRowType);
- SqrlPostgresRowConverter converter = new SqrlPostgresRowConverter(rowType);
-
- // Sample data
- GenericRowData rowData = new GenericRowData(6);
- rowData.setField(0, 1);
-
- // Integer Array - GenericArrayData
- GenericArrayData intArray = new GenericArrayData(new int[] {1, 2, 3});
- rowData.setField(1, intArray);
-
- // Double Array - GenericArrayData
- GenericArrayData doubleArray = new GenericArrayData(new double[] {1.1, 2.2, 3.3});
- rowData.setField(2, doubleArray);
-
- // Timestamp Array - GenericArrayData
- BinaryArrayData array = new BinaryArrayData();
- BinaryArrayWriter writer = new BinaryArrayWriter(array, 2, 8);
- final int precision = 3;
- writer.reset();
- writer.writeTimestamp(0, TimestampData.fromEpochMillis(123000L), precision);
- writer.writeTimestamp(1, TimestampData.fromEpochMillis(123000L), precision);
- writer.complete();
- rowData.setField(3, array);
-
- // 2D Double Array - GenericArrayData
- GenericArrayData doubleArray2d =
- new GenericArrayData(new double[][] {{1.1, 2.2}, {3.3, 4.4}});
- rowData.setField(4, doubleArray2d);
-
- // RowType not being an array
- GenericRowData sampleRow = new GenericRowData(2);
- sampleRow.setField(0, 10);
- sampleRow.setField(1, "test");
- rowData.setField(5, sampleRow);
-
- FieldNamedPreparedStatement statement =
- SqrlFieldNamedPreparedStatementImpl.prepareStatement(
- connection,
- "INSERT INTO test (id, int_data, double_data, ts_data, double_data_2d, row_data) VALUES (:id, :int_data, :double_data, :ts_data, :double_data_2d, :row_data)",
- List.of("id", "int_data", "double_data", "ts_data", "double_data_2d", "row_data")
- .toArray(String[]::new));
-
- for (int i = 0; i < rowType.getFieldCount(); i++) {
- JdbcSerializationConverter externalConverter =
- converter.createExternalConverter(rowType.getTypeAt(i));
- externalConverter.serialize(rowData, i, statement);
- }
- statement.addBatch();
-
- int[] result = statement.executeBatch();
- assertEquals(1, result.length);
-
- Statement stmt = connection.createStatement();
- // Deserialize
- ResultSet rs =
- stmt.executeQuery(
- "SELECT int_data, double_data, ts_data, double_data_2d, row_data FROM test WHERE id=1");
- assertTrue(rs.next());
-
- // Assert Integer Array
- Array intArrayRetrieved = rs.getArray("int_data");
- Object intDataDeserialized =
- converter
- .createArrayConverter(new ArrayType(new IntType()))
- .deserialize(intArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) rowData.getField(1)).toIntArray(),
- ((GenericArrayData) intDataDeserialized).toIntArray());
-
- // Assert Double Array
- Array doubleArrayRetrieved = rs.getArray("double_data");
- Object doubleDataDeserialized =
- converter.createArrayConverter(doubleArrayType).deserialize(doubleArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) rowData.getField(2)).toDoubleArray(),
- ((GenericArrayData) doubleDataDeserialized).toDoubleArray());
-
- // Assert Timestamp Array
- Array timestampArrayRetrieved = rs.getArray("ts_data");
- Object timestampDataDeserialized =
- converter.createArrayConverter(timestampArrayType).deserialize(timestampArrayRetrieved);
- assertArrayEquals(
- ((GenericArrayData) timestampDataDeserialized).toObjectArray(),
- Arrays.stream(((BinaryArrayData) rowData.getField(3)).toObjectArray(new TimestampType()))
- .toArray());
- // Assert 2D Double Array (it's a bit tricky given the 2D nature)
- Array double2DArrayRetrieved = rs.getArray("double_data_2d");
- Object double2DDataDeserialized =
- converter.createArrayConverter(doubleArray2DType).deserialize(double2DArrayRetrieved);
- // todo: 2d arrays are not well supported
- GenericArrayData field = (GenericArrayData) rowData.getField(4);
- assertNotNull(field);
-
- // todo: Row type not well supported
- Object rowRetrieved = rs.getObject("row_data");
- assertNotNull(rowRetrieved);
- }
- }
-}
diff --git a/sqrl-jdbc-1.19/pom.xml b/sqrl-jdbc-1.19/pom.xml
deleted file mode 100644
index d0cc1a1..0000000
--- a/sqrl-jdbc-1.19/pom.xml
+++ /dev/null
@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.datasqrl.flink</groupId>
-    <artifactId>sqrl-flink-parent</artifactId>
-    <version>0.1-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>sqrl-jdbc-1.19</artifactId>
-  <description>Jdbc sink for flink 1.19</description>
-
-  <url>https://www.datasqrl.com/</url>
-  <licenses>
-    <license>
-      <name>The Apache Software License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-
-  <developers>
-    <developer>
-      <name>Daniel Henneberger</name>
-      <email>daniel@datasqrl.com</email>
-      <url>https://github.com/henneberger</url>
-    </developer>
-  </developers>
-
-  <scm>
-    <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
-    <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
-    <tag>HEAD</tag>
-    <url>https://github.com/DataSQRL/sqrl</url>
-  </scm>
-
-  <properties>
-    <maven.compiler.source>11</maven.compiler.source>
-    <maven.compiler.target>11</maven.compiler.target>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <flink.version>1.19.1</flink.version>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-connector-jdbc</artifactId>
-      <version>3.2.0-1.19</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.postgresql</groupId>
-      <artifactId>postgresql</artifactId>
-      <version>${postgres.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.testcontainers</groupId>
-      <artifactId>postgresql</artifactId>
-      <version>${testcontainers.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-runtime</artifactId>
-      <version>${flink.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-common</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-lib-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-flexible-json</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-csv</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-json</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-planner_2.12</artifactId>
-      <version>${flink.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-test-utils</artifactId>
-      <version>${flink.version}</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
deleted file mode 100644
index b071627..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlBaseJdbcRowConverter.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static com.datasqrl.type.FlinkArrayTypeUtil.getBaseFlinkArrayType;
-import static com.datasqrl.type.FlinkArrayTypeUtil.isScalarArray;
-import static com.datasqrl.type.PostgresArrayTypeConverter.getArrayScalarName;
-import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
-
-import java.sql.Array;
-import java.sql.PreparedStatement;
-import java.sql.Timestamp;
-import java.sql.Types;
-import java.time.LocalDateTime;
-import lombok.SneakyThrows;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.data.ArrayData;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.data.binary.BinaryArrayData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LocalZonedTimestampType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-/** A SQRL base class that handles arrays and extra data types. */
-public abstract class SqrlBaseJdbcRowConverter extends AbstractJdbcRowConverter {
-
- public SqrlBaseJdbcRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (type.getTypeRoot() == TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- int timestampWithTimezone = Types.TIMESTAMP_WITH_TIMEZONE;
- return (val, index, statement) -> {
- if (val == null || val.isNullAt(index) || LogicalTypeRoot.NULL.equals(type.getTypeRoot())) {
- statement.setNull(index, timestampWithTimezone);
- } else {
- jdbcSerializationConverter.serialize(val, index, statement);
- }
- };
- }
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- LogicalTypeRoot root = type.getTypeRoot();
-
- if (root == LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
- return val ->
- val instanceof LocalDateTime
- ? TimestampData.fromLocalDateTime((LocalDateTime) val)
- : TimestampData.fromTimestamp((Timestamp) val);
- } else if (root == LogicalTypeRoot.ARRAY) {
- ArrayType arrayType = (ArrayType) type;
- return createArrayConverter(arrayType);
- } else if (root == LogicalTypeRoot.ROW) {
- return val -> val;
- } else if (root == LogicalTypeRoot.MAP) {
- return val -> val;
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- switch (type.getTypeRoot()) {
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- final int tsPrecision = ((LocalZonedTimestampType) type).getPrecision();
- return (val, index, statement) ->
- statement.setTimestamp(index, val.getTimestamp(index, tsPrecision).toTimestamp());
- case MULTISET:
- case RAW:
- default:
- return super.createExternalConverter(type);
- }
- }
-
- @SneakyThrows
- private void createSqlArrayObject(
- LogicalType type, ArrayData data, int idx, PreparedStatement statement) {
- // Scalar arrays of any dimension are one array call
- if (isScalarArray(type)) {
- Object[] boxed;
- if (data instanceof GenericArrayData) {
- boxed = ((GenericArrayData) data).toObjectArray();
- } else if (data instanceof BinaryArrayData) {
- boxed = ((BinaryArrayData) data).toObjectArray(getBaseFlinkArrayType(type));
- } else {
- throw new RuntimeException("Unsupported ArrayData type: " + data.getClass());
- }
- Array array = statement.getConnection().createArrayOf(getArrayScalarName(type), boxed);
- statement.setArray(idx, array);
- } else {
- // If it is not a scalar array (e.g. row type), use an empty byte array.
- Array array = statement.getConnection().createArrayOf(getArrayType(), new Byte[0]);
- statement.setArray(idx, array);
- }
- }
-
- protected abstract String getArrayType();
-
- public abstract JdbcDeserializationConverter createArrayConverter(ArrayType arrayType);
-}
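Note: createSqlArrayObject is the write-side counterpart of createArrayConverter: a scalar
array of any nesting depth becomes a single JDBC array, while structured element types fall
back to the dialect's getArrayType placeholder. A sketch of the scalar branch, assuming an
open PostgreSQL Connection conn and a PreparedStatement ps:

    // one createArrayOf call covers the whole boxed array; the element type name comes
    // from PostgresArrayTypeConverter.getArrayScalarName
    java.sql.Array sqlArray = conn.createArrayOf("integer", new Object[] {1, 2, 3});
    ps.setArray(1, sqlArray);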
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
deleted file mode 100644
index 05a8c6f..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlJdbcDynamicTableFactory.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.DRIVER;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_MISSING_KEY;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_CACHE_TTL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.LOOKUP_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.MAX_RETRY_TIMEOUT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.PASSWORD;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_AUTO_COMMIT;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_FETCH_SIZE;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_COLUMN;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_LOWER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_NUM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SCAN_PARTITION_UPPER_BOUND;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_INTERVAL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_BUFFER_FLUSH_MAX_ROWS;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_MAX_RETRIES;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.SINK_PARALLELISM;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.TABLE_NAME;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.URL;
-import static org.apache.flink.connector.jdbc.table.JdbcConnectorOptions.USERNAME;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.configuration.ConfigOption;
-import org.apache.flink.configuration.ConfigOptions;
-import org.apache.flink.configuration.ReadableConfig;
-import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectLoader;
-import org.apache.flink.connector.jdbc.internal.options.InternalJdbcConnectionOptions;
-import org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSink;
-import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSource;
-import org.apache.flink.table.connector.sink.DynamicTableSink;
-import org.apache.flink.table.connector.source.lookup.LookupOptions;
-import org.apache.flink.table.factories.DynamicTableSinkFactory;
-import org.apache.flink.table.factories.FactoryUtil;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.util.Preconditions;
-
-/**
- * Factory for creating configured instances of {@link JdbcDynamicTableSource} and {@link
- * JdbcDynamicTableSink}.
- */
-@Internal
-public class SqrlJdbcDynamicTableFactory implements DynamicTableSinkFactory {
-
- public static final String IDENTIFIER = "jdbc-sqrl";
-
- @Override
- public DynamicTableSink createDynamicTableSink(Context context) {
- final FactoryUtil.TableFactoryHelper helper =
- FactoryUtil.createTableFactoryHelper(this, context);
- final ReadableConfig config = helper.getOptions();
-
- helper.validate();
- validateConfigOptions(config, context.getClassLoader());
- validateDataTypeWithJdbcDialect(
- context.getPhysicalRowDataType(), config.get(URL), context.getClassLoader());
- InternalJdbcConnectionOptions jdbcOptions = getJdbcOptions(config, context.getClassLoader());
-
- return new JdbcDynamicTableSink(
- jdbcOptions,
- getJdbcExecutionOptions(config),
- getJdbcDmlOptions(
- jdbcOptions, context.getPhysicalRowDataType(), context.getPrimaryKeyIndexes()),
- context.getPhysicalRowDataType());
- }
-
- private static void validateDataTypeWithJdbcDialect(
- DataType dataType, String url, ClassLoader classLoader) {
- JdbcDialect dialect = loadDialect(url, classLoader);
-
- dialect.validate((RowType) dataType.getLogicalType());
- }
-
- private InternalJdbcConnectionOptions getJdbcOptions(
- ReadableConfig readableConfig, ClassLoader classLoader) {
- final String url = readableConfig.get(URL);
- final InternalJdbcConnectionOptions.Builder builder =
- InternalJdbcConnectionOptions.builder()
- .setClassLoader(classLoader)
- .setDBUrl(url)
- .setTableName(readableConfig.get(TABLE_NAME))
- .setDialect(loadDialect(url, classLoader))
- .setParallelism(readableConfig.getOptional(SINK_PARALLELISM).orElse(null))
- .setConnectionCheckTimeoutSeconds(
- (int) readableConfig.get(MAX_RETRY_TIMEOUT).getSeconds());
-
- readableConfig.getOptional(DRIVER).ifPresent(builder::setDriverName);
- readableConfig.getOptional(USERNAME).ifPresent(builder::setUsername);
- readableConfig.getOptional(PASSWORD).ifPresent(builder::setPassword);
- return builder.build();
- }
-
- private static JdbcDialect loadDialect(String url, ClassLoader classLoader) {
- JdbcDialect dialect = JdbcDialectLoader.load(url, classLoader);
- // sqrl: standard postgres dialect with extended dialect
- if (dialect.dialectName().equalsIgnoreCase("PostgreSQL")) {
- return new SqrlPostgresDialect();
- }
- return dialect;
- }
-
- private JdbcExecutionOptions getJdbcExecutionOptions(ReadableConfig config) {
- final JdbcExecutionOptions.Builder builder = new JdbcExecutionOptions.Builder();
- builder.withBatchSize(config.get(SINK_BUFFER_FLUSH_MAX_ROWS));
- builder.withBatchIntervalMs(config.get(SINK_BUFFER_FLUSH_INTERVAL).toMillis());
- builder.withMaxRetries(config.get(SINK_MAX_RETRIES));
- return builder.build();
- }
-
- private JdbcDmlOptions getJdbcDmlOptions(
- InternalJdbcConnectionOptions jdbcOptions, DataType dataType, int[] primaryKeyIndexes) {
-
- String[] keyFields =
- Arrays.stream(primaryKeyIndexes)
- .mapToObj(i -> DataType.getFieldNames(dataType).get(i))
- .toArray(String[]::new);
-
- return JdbcDmlOptions.builder()
- .withTableName(jdbcOptions.getTableName())
- .withDialect(jdbcOptions.getDialect())
- .withFieldNames(DataType.getFieldNames(dataType).toArray(new String[0]))
- .withKeyFields(keyFields.length > 0 ? keyFields : null)
- .build();
- }
-
- @Override
- public String factoryIdentifier() {
- return IDENTIFIER;
- }
-
- @Override
- public Set<ConfigOption<?>> requiredOptions() {
- Set<ConfigOption<?>> requiredOptions = new HashSet<>();
- requiredOptions.add(URL);
- requiredOptions.add(TABLE_NAME);
- return requiredOptions;
- }
-
- @Override
- public Set<ConfigOption<?>> optionalOptions() {
- Set<ConfigOption<?>> optionalOptions = new HashSet<>();
- optionalOptions.add(DRIVER);
- optionalOptions.add(USERNAME);
- optionalOptions.add(PASSWORD);
- optionalOptions.add(SCAN_PARTITION_COLUMN);
- optionalOptions.add(SCAN_PARTITION_LOWER_BOUND);
- optionalOptions.add(SCAN_PARTITION_UPPER_BOUND);
- optionalOptions.add(SCAN_PARTITION_NUM);
- optionalOptions.add(SCAN_FETCH_SIZE);
- optionalOptions.add(SCAN_AUTO_COMMIT);
- optionalOptions.add(LOOKUP_CACHE_MAX_ROWS);
- optionalOptions.add(LOOKUP_CACHE_TTL);
- optionalOptions.add(LOOKUP_MAX_RETRIES);
- optionalOptions.add(LOOKUP_CACHE_MISSING_KEY);
- optionalOptions.add(SINK_BUFFER_FLUSH_MAX_ROWS);
- optionalOptions.add(SINK_BUFFER_FLUSH_INTERVAL);
- optionalOptions.add(SINK_MAX_RETRIES);
- optionalOptions.add(SINK_PARALLELISM);
- optionalOptions.add(MAX_RETRY_TIMEOUT);
- optionalOptions.add(LookupOptions.CACHE_TYPE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_ACCESS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_WRITE);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_MAX_ROWS);
- optionalOptions.add(LookupOptions.PARTIAL_CACHE_CACHE_MISSING_KEY);
- optionalOptions.add(LookupOptions.MAX_RETRIES);
- return optionalOptions;
- }
-
- @Override
- public Set<ConfigOption<?>> forwardOptions() {
- return Stream.of(
- URL,
- TABLE_NAME,
- USERNAME,
- PASSWORD,
- DRIVER,
- SINK_BUFFER_FLUSH_MAX_ROWS,
- SINK_BUFFER_FLUSH_INTERVAL,
- SINK_MAX_RETRIES,
- MAX_RETRY_TIMEOUT,
- SCAN_FETCH_SIZE,
- SCAN_AUTO_COMMIT)
- .collect(Collectors.toSet());
- }
-
- private void validateConfigOptions(ReadableConfig config, ClassLoader classLoader) {
- String jdbcUrl = config.get(URL);
- // JdbcDialectLoader.load(jdbcUrl, classLoader);
-
- checkAllOrNone(config, new ConfigOption[] {USERNAME, PASSWORD});
-
- checkAllOrNone(
- config,
- new ConfigOption[] {
- SCAN_PARTITION_COLUMN,
- SCAN_PARTITION_NUM,
- SCAN_PARTITION_LOWER_BOUND,
- SCAN_PARTITION_UPPER_BOUND
- });
-
- if (config.getOptional(SCAN_PARTITION_LOWER_BOUND).isPresent()
- && config.getOptional(SCAN_PARTITION_UPPER_BOUND).isPresent()) {
- long lowerBound = config.get(SCAN_PARTITION_LOWER_BOUND);
- long upperBound = config.get(SCAN_PARTITION_UPPER_BOUND);
- if (lowerBound > upperBound) {
- throw new IllegalArgumentException(
- String.format(
- "'%s'='%s' must not be larger than '%s'='%s'.",
- SCAN_PARTITION_LOWER_BOUND.key(),
- lowerBound,
- SCAN_PARTITION_UPPER_BOUND.key(),
- upperBound));
- }
- }
-
- checkAllOrNone(config, new ConfigOption[] {LOOKUP_CACHE_MAX_ROWS, LOOKUP_CACHE_TTL});
-
- if (config.get(LOOKUP_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- LOOKUP_MAX_RETRIES.key(), config.get(LOOKUP_MAX_RETRIES)));
- }
-
- if (config.get(SINK_MAX_RETRIES) < 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option shouldn't be negative, but is %s.",
- SINK_MAX_RETRIES.key(), config.get(SINK_MAX_RETRIES)));
- }
-
- if (config.get(MAX_RETRY_TIMEOUT).getSeconds() <= 0) {
- throw new IllegalArgumentException(
- String.format(
- "The value of '%s' option must be in second granularity and shouldn't be smaller than 1 second, but is %s.",
- MAX_RETRY_TIMEOUT.key(),
- config.get(
- ConfigOptions.key(MAX_RETRY_TIMEOUT.key()).stringType().noDefaultValue())));
- }
- }
-
- private void checkAllOrNone(ReadableConfig config, ConfigOption<?>[] configOptions) {
- int presentCount = 0;
- for (ConfigOption configOption : configOptions) {
- if (config.getOptional(configOption).isPresent()) {
- presentCount++;
- }
- }
- String[] propertyNames =
- Arrays.stream(configOptions).map(ConfigOption::key).toArray(String[]::new);
- Preconditions.checkArgument(
- configOptions.length == presentCount || presentCount == 0,
- "Either all or none of the following options should be provided:\n"
- + String.join("\n", propertyNames));
- }
-}
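Note: the factory registers under IDENTIFIER "jdbc-sqrl" (see the META-INF/services entry
removed earlier) and swaps in SqrlPostgresDialect whenever the JDBC URL resolves to PostgreSQL.
A minimal sketch of wiring a sink through it from Flink SQL, with hypothetical URL and
credentials, mirroring the FlinkJdbcTest above:

    tableEnv.executeSql(
        "CREATE TABLE jdbc_sink (id BIGINT) WITH ("
            + " 'connector' = 'jdbc-sqrl',"
            + " 'url' = 'jdbc:postgresql://localhost:5432/mydb',"
            + " 'table-name' = 'test_table',"
            + " 'username' = 'postgres',"
            + " 'password' = 'postgres'"
            + ")");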
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
deleted file mode 100644
index 4dae46a..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlPostgresDialect.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.flink.connector.jdbc.converter.JdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.RowType.RowField;
-
-/**
- * JDBC dialect for PostgreSQL.
- *
- * SQRL: Add quoting to identifiers
- */
-public class SqrlPostgresDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-datetime.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to PostgreSQL docs:
- // https://www.postgresql.org/docs/12/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL
- private static final int MAX_DECIMAL_PRECISION = 1000;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public JdbcRowConverter getRowConverter(RowType rowType) {
- return new SqrlPostgresRowConverter(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional<String> defaultDriverName() {
- return Optional.of("org.postgresql.Driver");
- }
-
- /** Postgres upsert query. It uses ON CONFLICT ... DO UPDATE SET ... to upsert into Postgres. */
- @Override
- public Optional<String> getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- String uniqueColumns =
- Arrays.stream(uniqueKeyFields).map(this::quoteIdentifier).collect(Collectors.joining(", "));
- String updateClause =
- Arrays.stream(fieldNames)
- .map(f -> quoteIdentifier(f) + "=EXCLUDED." + quoteIdentifier(f))
- .collect(Collectors.joining(", "));
- return Optional.of(
- getInsertIntoStatement(tableName, fieldNames)
- + " ON CONFLICT ("
- + uniqueColumns
- + ")"
- + " DO UPDATE SET "
- + updateClause);
- }
-
- @Override
- public void validate(RowType rowType) throws ValidationException {
- List<LogicalType> unsupportedTypes =
- rowType.getFields().stream()
- .map(RowField::getType)
- .filter(type -> LogicalTypeRoot.RAW.equals(type.getTypeRoot()))
- .filter(type -> !isSupportedType(type))
- .collect(Collectors.toList());
-
- if (!unsupportedTypes.isEmpty()) {
- throw new ValidationException(
- String.format(
- "The %s dialect doesn't support type: %s.", this.dialectName(), unsupportedTypes));
- }
-
- super.validate(rowType);
- }
-
- private boolean isSupportedType(LogicalType type) {
- return SqrlPostgresRowConverter.sqrlSerializers.containsKey(type.getDefaultConversion());
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "\"" + identifier + "\"";
- }
-
- @Override
- public String dialectName() {
- return "PostgreSQL";
- }
-
- @Override
- public Optional<Range> decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
- public Optional<Range> timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Set<LogicalTypeRoot> supportedTypes() {
- // The data types used in PostgreSQL are listed at:
- // https://www.postgresql.org/docs/12/datatype.html
-
- // TODO: We can't convert BINARY data type to
- // PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO in
- // LegacyTypeInfoDataTypeConverter.
-
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
- LogicalTypeRoot.ARRAY,
- LogicalTypeRoot.MAP,
- LogicalTypeRoot.RAW // see validate() for supported structured types
- );
- }
-}
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
deleted file mode 100644
index ba923c2..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/jdbc/SqrlPostgresRowConverter.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import com.datasqrl.type.JdbcTypeSerializer;
-import java.lang.reflect.Type;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.stream.Collectors;
-import org.apache.flink.table.data.GenericArrayData;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
-import org.postgresql.jdbc.PgArray;
-
-/**
- * Runtime converter responsible for converting between JDBC objects and Flink internal objects
- * for PostgreSQL.
- *
- * SQRL: Add array support
- */
-public class SqrlPostgresRowConverter extends SqrlBaseJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- public static final Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- sqrlSerializers = discoverSerializers();
-
- private static Map<
- Type, JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter>>
- discoverSerializers() {
- return ServiceLoader.load(JdbcTypeSerializer.class).stream()
- .map(f -> f.get())
- .filter(f -> f.getDialectId().equalsIgnoreCase("postgres"))
- .collect(Collectors.toMap(JdbcTypeSerializer::getConversionClass, t -> t));
- }
-
- @Override
- public String converterName() {
- return "PostgreSQL";
- }
-
- public SqrlPostgresRowConverter(RowType rowType) {
- super(rowType);
- }
-
- @Override
- public JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getDeserializerConverter().create();
- } else {
- return super.createInternalConverter(type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter wrapIntoNullableExternalConverter(
- JdbcSerializationConverter jdbcSerializationConverter, LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return jdbcSerializationConverter::serialize;
- } else {
- return super.wrapIntoNullableExternalConverter(jdbcSerializationConverter, type);
- }
- }
-
- @Override
- protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
- if (sqrlSerializers.containsKey(type.getDefaultConversion())) {
- return sqrlSerializers.get(type.getDefaultConversion()).getSerializerConverter(type).create();
- } else {
- return super.createExternalConverter(type);
- }
- }
-
- @Override
- protected String getArrayType() {
- return "bytea";
- }
-
- @Override
- public JdbcDeserializationConverter createArrayConverter(ArrayType arrayType) {
- // Since PGJDBC 42.2.15 (https://github.com/pgjdbc/pgjdbc/pull/1194) bytea[] is wrapped in
- // primitive byte arrays
- final Class<?> elementClass =
- LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
- final JdbcDeserializationConverter elementConverter =
- createNullableInternalConverter(arrayType.getElementType());
- return val -> {
- // sqrl: check if scalar array
-
- Object[] in;
- if (val instanceof PgArray) {
- PgArray pgArray = (PgArray) val;
- in = (Object[]) pgArray.getArray();
- } else {
- in = (Object[]) val;
- }
- final Object[] array =
- (Object[]) java.lang.reflect.Array.newInstance(elementClass, in.length);
- for (int i = 0; i < in.length; i++) {
- array[i] = elementConverter.deserialize(in[i]);
- }
- return new GenericArrayData(array);
- };
- }
-}
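For reference, the array path deleted above allocates a typed Java array reflectively and funnels every element through the nullable element converter. A minimal, self-contained sketch of that copy loop (class and names here are illustrative stand-ins, not part of the deleted code):

```java
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.function.Function;

// Illustrative sketch of the element-wise conversion inside createArrayConverter:
// allocate a typed array reflectively, then run each element through a converter.
public class ArrayConverterSketch {

  static Object[] convert(Object[] in, Class<?> elementClass, Function<Object, Object> convertElement) {
    Object[] out = (Object[]) Array.newInstance(elementClass, in.length);
    for (int i = 0; i < in.length; i++) {
      out[i] = convertElement.apply(in[i]);
    }
    return out;
  }

  public static void main(String[] args) {
    // Widen Integer elements to Long, as a stand-in for a JDBC-to-Flink element converter.
    Object[] widened = convert(new Object[] {1, 2, 3}, Long.class, v -> ((Integer) v).longValue());
    System.out.println(Arrays.toString(widened)); // [1, 2, 3]
  }
}
```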
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
deleted file mode 100644
index 547c983..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/FlinkArrayTypeUtil.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class FlinkArrayTypeUtil {
-
- public static LogicalType getBaseFlinkArrayType(LogicalType type) {
- if (type instanceof ArrayType) {
- return getBaseFlinkArrayType(((ArrayType) type).getElementType());
- }
- return type;
- }
-
- public static boolean isScalarArray(LogicalType type) {
- if (type instanceof ArrayType) {
- LogicalType elementType = ((ArrayType) type).getElementType();
- return isScalar(elementType) || isScalarArray(elementType);
- }
- return false;
- }
-
- public static boolean isScalar(LogicalType type) {
- switch (type.getTypeRoot()) {
- case BOOLEAN:
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- case FLOAT:
- case DOUBLE:
- case CHAR:
- case VARCHAR:
- case BINARY:
- case VARBINARY:
- case DATE:
- case TIME_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- case DECIMAL:
- return true;
- default:
- return false;
- }
- }
-}
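A usage sketch for the helper above, assuming the deleted class and flink-table-common are on the classpath; the printed results follow directly from the switch in isScalar:

```java
import com.datasqrl.type.FlinkArrayTypeUtil;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class FlinkArrayTypeUtilDemo {
  public static void main(String[] args) {
    // ARRAY<INT> and the nested ARRAY<ARRAY<INT>> both count as scalar arrays.
    System.out.println(FlinkArrayTypeUtil.isScalarArray(new ArrayType(new IntType())));   // true
    System.out.println(FlinkArrayTypeUtil.isScalarArray(
        new ArrayType(new ArrayType(new IntType()))));                                    // true
    // ARRAY<ROW<...>> is not scalar, so it takes the JSON serialization path instead.
    System.out.println(FlinkArrayTypeUtil.isScalarArray(
        new ArrayType(RowType.of(new IntType(), new VarCharType()))));                    // false
    // getBaseFlinkArrayType unwraps nesting down to the element type.
    System.out.println(FlinkArrayTypeUtil.getBaseFlinkArrayType(
        new ArrayType(new ArrayType(new IntType()))));                                    // INT
  }
}
```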
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
deleted file mode 100644
index 0a726ee..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/JdbcTypeSerializer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.LogicalType;
-
-public interface JdbcTypeSerializer<D, S> {
-
- String getDialectId();
-
- Class<?> getConversionClass();
-
- String dialectTypeName();
-
- GenericDeserializationConverter<D> getDeserializerConverter();
-
- GenericSerializationConverter<S> getSerializerConverter(LogicalType type);
-
- interface GenericSerializationConverter<T> {
- T create();
- }
-
- interface GenericDeserializationConverter<T> {
- T create();
- }
-}
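This interface is a ServiceLoader SPI: an implementation declares its dialect, the Java class it handles, and factories for the two converter directions, and SqrlPostgresRowConverter discovers it at startup. A hypothetical implementation sketch, mapping java.time.Duration to a Postgres interval, which SQRL does not actually ship, just to show the shape:

```java
import com.datasqrl.type.JdbcTypeSerializer;
import java.time.Duration;
import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
import org.apache.flink.table.types.logical.LogicalType;

// Hypothetical serializer; illustrates the SPI shape, not a supported SQRL type.
public class PostgresDurationTypeSerializer
    implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {

  @Override
  public String getDialectId() {
    return "postgres";
  }

  @Override
  public Class<?> getConversionClass() {
    return Duration.class;
  }

  @Override
  public String dialectTypeName() {
    return "interval";
  }

  @Override
  public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
    // JDBC hands back the interval's textual form; parse it into a Duration.
    return () -> val -> Duration.parse(val.toString());
  }

  @Override
  public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
      LogicalType type) {
    // Bind the column value as its string representation.
    return () -> (val, index, statement) -> statement.setString(index, val.getString(index).toString());
  }
}
```

An implementation only takes effect once it is listed in META-INF/services/com.datasqrl.type.JdbcTypeSerializer, like the resource file deleted further down in this diff.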
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
deleted file mode 100644
index 96ee9f9..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresArrayTypeConverter.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-
-public class PostgresArrayTypeConverter {
-
- /** Returns the PostgreSQL type name for the scalar element type of a Flink array. */
- public static String getArrayScalarName(LogicalType type) {
- switch (type.getTypeRoot()) {
- case CHAR:
- case VARCHAR:
- return "text";
- case BOOLEAN:
- return "boolean";
- case BINARY:
- case VARBINARY:
- return "bytea";
- case DECIMAL:
- return "decimal";
- case TINYINT:
- return "smallint";
- case SMALLINT:
- return "smallint";
- case INTEGER:
- return "integer";
- case BIGINT:
- return "bigint";
- case FLOAT:
- return "real"; // PostgreSQL uses REAL for float
- case DOUBLE:
- return "double";
- case DATE:
- return "date";
- case TIME_WITHOUT_TIME_ZONE:
- return "time without time zone";
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- return "timestamp without time zone";
- case TIMESTAMP_WITH_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- return "timestamptz";
- case INTERVAL_YEAR_MONTH:
- return "interval year to month";
- case INTERVAL_DAY_TIME:
- return "interval day to second";
- case NULL:
- return "void";
- case ARRAY:
- return getArrayScalarName(((ArrayType) type).getElementType());
- case MULTISET:
- case MAP:
- case ROW:
- case DISTINCT_TYPE:
- case STRUCTURED_TYPE:
- case RAW:
- case SYMBOL:
- case UNRESOLVED:
- default:
- throw new RuntimeException("Cannot convert type to array type");
- }
- }
-}
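A usage sketch of the mapping above (assumes the deleted class and flink-table-common on the classpath):

```java
import com.datasqrl.type.PostgresArrayTypeConverter;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.FloatType;

// Resolves the Postgres scalar name used when declaring array columns.
public class ArrayScalarNameDemo {
  public static void main(String[] args) {
    System.out.println(PostgresArrayTypeConverter.getArrayScalarName(new BigIntType())); // bigint
    System.out.println(PostgresArrayTypeConverter.getArrayScalarName(new FloatType()));  // real
    // Nested arrays resolve to the innermost element's scalar name.
    System.out.println(PostgresArrayTypeConverter.getArrayScalarName(
        new ArrayType(new ArrayType(new BigIntType()))));                                // bigint
  }
}
```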
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
deleted file mode 100644
index 7c0eb5d..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresJsonTypeSerializer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.json.FlinkJsonType;
-import com.datasqrl.json.FlinkJsonTypeSerializer;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.table.data.RawValueData;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.postgresql.util.PGobject;
-
-public class PostgresJsonTypeSerializer
- implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class getConversionClass() {
- return FlinkJsonType.class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
- public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () ->
- (val) -> {
- FlinkJsonType t = (FlinkJsonType) val;
- return t.getJson();
- };
- }
-
- @Override
- public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- FlinkJsonTypeSerializer typeSerializer = new FlinkJsonTypeSerializer();
-
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- RawValueData<FlinkJsonType> object = val.getRawValue(index);
- FlinkJsonType vec = object.toObject(typeSerializer);
- if (vec == null) {
- statement.setObject(index, null);
- } else {
- pgObject.setValue(vec.getJson().toString());
- statement.setObject(index, pgObject);
- }
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
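Stripped of the Flink plumbing, the serializer's write path is a plain PGobject bind. A standalone sketch of that bind (connection URL, table, and column are placeholders):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import org.postgresql.util.PGobject;

// Minimal sketch of the per-row PGobject binding the serializer performs.
public class PgJsonBindSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn =
            DriverManager.getConnection("jdbc:postgresql://localhost/db", "user", "pw");
        PreparedStatement ps =
            conn.prepareStatement("INSERT INTO test_table (payload) VALUES (?)")) {
      PGobject pgObject = new PGobject();
      pgObject.setType("json"); // the serializer sets "json" even when the column is jsonb
      pgObject.setValue("{\"key\":\"value\"}");
      ps.setObject(1, pgObject);
      ps.executeUpdate();
    }
  }
}
```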
diff --git a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java b/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
deleted file mode 100644
index c86f5d0..0000000
--- a/sqrl-jdbc-1.19/src/main/java/com/datasqrl/type/PostgresRowTypeSerializer.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.type;
-
-import com.datasqrl.format.SqrlRowDataToJsonConverters;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.formats.common.TimestampFormat;
-import org.apache.flink.formats.json.JsonFormatOptions.MapNullKeyMode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.types.logical.ArrayType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.types.Row;
-import org.postgresql.util.PGobject;
-
-public class PostgresRowTypeSerializer
- implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class getConversionClass() {
- return Row[].class;
- }
-
- @Override
- public String dialectTypeName() {
- return "jsonb";
- }
-
- @Override
- public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () -> {
- return (val) -> null;
- };
- }
-
- @Override
- public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- ObjectMapper mapper = new ObjectMapper();
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
- SqrlRowDataToJsonConverters rowDataToJsonConverter =
- new SqrlRowDataToJsonConverters(TimestampFormat.SQL, MapNullKeyMode.DROP, "null");
-
- ArrayType arrayType = (ArrayType) type;
- ObjectNode objectNode = mapper.createObjectNode();
- JsonNode convert =
- rowDataToJsonConverter
- .createConverter(arrayType.getElementType())
- .convert(mapper, objectNode, val);
-
- PGobject pgObject = new PGobject();
- pgObject.setType("json");
- pgObject.setValue(convert.toString());
- statement.setObject(index, pgObject);
- } else {
- statement.setObject(index, null);
- }
- };
- }
-}
diff --git a/sqrl-jdbc-1.19/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer b/sqrl-jdbc-1.19/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
deleted file mode 100644
index 0673d25..0000000
--- a/sqrl-jdbc-1.19/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
+++ /dev/null
@@ -1,2 +0,0 @@
-com.datasqrl.type.PostgresRowTypeSerializer
-com.datasqrl.type.PostgresJsonTypeSerializer
\ No newline at end of file
diff --git a/sqrl-jdbc-1.19/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/sqrl-jdbc-1.19/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
deleted file mode 100644
index 20a59c9..0000000
--- a/sqrl-jdbc-1.19/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.jdbc.SqrlJdbcDynamicTableFactory
\ No newline at end of file
diff --git a/sqrl-jdbc-1.19/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java b/sqrl-jdbc-1.19/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
deleted file mode 100644
index a528bb8..0000000
--- a/sqrl-jdbc-1.19/src/test/java/com/datasqrl/jdbc/FlinkJdbcTest.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.jdbc;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.EnvironmentSettings;
-import org.apache.flink.table.api.ResultKind;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.junit5.MiniClusterExtension;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.testcontainers.containers.PostgreSQLContainer;
-
-@ExtendWith(MiniClusterExtension.class)
-public class FlinkJdbcTest {
-
- @Test
- public void testFlinkWithPostgres() throws Exception {
- // Start PostgreSQL container
- try (PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:14")) {
- postgres.start();
- // Establish a connection and create the PostgreSQL table
- try (Connection conn =
- DriverManager.getConnection(
- postgres.getJdbcUrl(), postgres.getUsername(), postgres.getPassword());
- Statement stmt = conn.createStatement()) {
- String createTableSQL = "CREATE TABLE test_table (" + " \"arrayOfRows\" JSONB " + ")";
- stmt.executeUpdate(createTableSQL);
- }
-
- // Set up Flink environment
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
-
- // Define the schema
- String createSourceTable =
- "CREATE TABLE datagen_source ("
- + " arrayOfRows ARRAY> "
- + ") WITH ("
- + " 'connector' = 'datagen',"
- + " 'number-of-rows' = '10'"
- + ")";
-
- String createSinkTable =
- "CREATE TABLE jdbc_sink ("
- + " arrayOfRows RAW('com.datasqrl.json.FlinkJsonType', 'ADFjb20uZGF0YXNxcmwuanNvbi5GbGlua0pzb25UeXBlU2VyaWFsaXplclNuYXBzaG90AAAAAQApY29tLmRhdGFzcXJsLmpzb24uRmxpbmtKc29uVHlwZVNlcmlhbGl6ZXI=') "
- + ") WITH ("
- + " 'connector' = 'jdbc-sqrl', "
- + " 'url' = '"
- + postgres.getJdbcUrl()
- + "', "
- + " 'table-name' = 'test_table', "
- + " 'username' = '"
- + postgres.getUsername()
- + "', "
- + " 'password' = '"
- + postgres.getPassword()
- + "'"
- + ")";
-
- // Register tables in the environment
- tableEnv.executeSql(
- "CREATE TEMPORARY FUNCTION IF NOT EXISTS `tojson` AS 'com.datasqrl.json.ToJson' LANGUAGE JAVA");
- tableEnv.executeSql(createSourceTable);
- tableEnv.executeSql(createSinkTable);
-
- // Set up a simple Flink job
- TableResult tableResult =
- tableEnv.executeSql(
- "INSERT INTO jdbc_sink SELECT tojson(arrayOfRows) AS arrayOfRows FROM datagen_source");
- tableResult.print();
-
- assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind());
- }
- }
-
- @Test
- public void testWriteAndReadToPostgres() throws Exception {
- try (PostgreSQLContainer<?> postgresContainer = new PostgreSQLContainer<>("postgres:14")) {
- postgresContainer.start();
- try (Connection conn =
- DriverManager.getConnection(
- postgresContainer.getJdbcUrl(),
- postgresContainer.getUsername(),
- postgresContainer.getPassword());
- Statement stmt = conn.createStatement()) {
- String createTableSQL = "CREATE TABLE test_table (" + " id BIGINT, name VARCHAR " + ")";
- stmt.executeUpdate(createTableSQL);
- }
-
- // Set up Flink mini cluster environment
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
- StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
-
- // Create a PostgreSQL table using the Table API
- tEnv.executeSql(
- "CREATE TABLE test_table ("
- + "id BIGINT,"
- + "name STRING"
- + ") WITH ("
- + "'connector' = 'jdbc',"
- + "'url' = '"
- + postgresContainer.getJdbcUrl()
- + "',"
- + "'table-name' = 'test_table',"
- + "'username' = '"
- + postgresContainer.getUsername()
- + "',"
- + "'password' = '"
- + postgresContainer.getPassword()
- + "'"
- + ")");
-
- // Create a DataGen source to generate 10 rows of data
- tEnv.executeSql(
- "CREATE TABLE datagen_source ("
- + "id BIGINT,"
- + "name STRING"
- + ") WITH ("
- + "'connector' = 'datagen',"
- + "'rows-per-second' = '1',"
- + "'fields.id.kind' = 'sequence',"
- + "'fields.id.start' = '1',"
- + "'fields.id.end' = '10',"
- + "'fields.name.length' = '10'"
- + ")");
-
- // Insert data from the DataGen source into the PostgreSQL table
- tEnv.executeSql("INSERT INTO test_table SELECT * FROM datagen_source").await();
-
- // Verify the data has been inserted by querying the PostgreSQL database directly
- Connection connection = postgresContainer.createConnection("");
- Statement statement = connection.createStatement();
- ResultSet resultSet = statement.executeQuery("SELECT COUNT(*) FROM test_table");
-
- int count = 0;
- if (resultSet.next()) {
- count = resultSet.getInt(1);
- }
-
- // Validate that 10 rows were inserted
- assertEquals(10, count);
-
- connection.close();
- }
- }
-}
diff --git a/sqrl-json/README.md b/sqrl-json/README.md
deleted file mode 100644
index 0b66da1..0000000
--- a/sqrl-json/README.md
+++ /dev/null
@@ -1,13 +0,0 @@
-| Function Documentation |
-|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `JsonArray(ANY...) → Json` Create a JSON array from a varying number of objects. Example: `JsonArray('element1', 2, 'element3') → ["element1", 2, "element3"]` |
-| `JsonArrayAgg(ANY) → Json` Aggregate values into a JSON array. Optional parameters allow for various data types to be included in the aggregation. Example: `JsonArrayAgg(col) → ["value1", "value2", ...]` |
-| `JsonConcat(Json, Json) → Json` Concatenate two JSON objects. Example: `JsonConcat(json, json) → {...}` |
-| `JsonExists(Json, String) → Boolean` Check if a JSON path exists within a JSON object. Example: `JsonExists(json, '$.path') → true` |
-| `JsonExtract(Json, ANY) → String` Extract a value from a JSON object using a JSON path. Optional parameters allow for default values of different types if extraction fails. Example: `JsonExtract(json, '$.path') → "extracted"` Optional examples: `JsonExtract(json, '$.path', "default") → "extracted"` `JsonExtract(json, '$.path', true) → true` `JsonExtract(json, '$.path', 10.5) → 10.5` `JsonExtract(json, '$.path', 5) → 5` |
-| `JsonObject(ANY...) → Json` Create a JSON object from key-value pairs. Example: `JsonObject('key1', 'value1', 'key2', 2) → {"key1": "value1", "key2": 2}` |
-| `JsonObjectAgg(String key, ANY value) → Json` Aggregate key-value pairs into a JSON object. Optional parameters allow for various data types to be used as values. Example: `JsonObjectAgg('key1', 'value1', 10.5, 2L, 3, json) → {"key1": "value1", "key2": 10.5, "key3": 2, "key4": 3, ...}` |
-| `JsonQuery(Json, String) → String` Execute a JSON path query on a JSON object and return the result as a JSON string. Example: `JsonQuery(json, '$.path') → "result"` |
-| `JsonToString(Json) → String` Convert a JSON object to its string representation. Example: `JsonToString(json) → "{\"key\":\"value\"}"` |
-| `ToJson(String) → Json` Convert a string containing JSON into a JSON object. Example: `ToJson('{"key":"value"}') → { "key" : "value" }` |
-
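A sketch of using these functions from Flink SQL; only the `tojson` registration name appears verbatim in the tests elsewhere in this diff, the other name and the query are illustrative, and the sketch assumes the RAW FlinkJsonType bridging resolves as it does in the SQRL runtime:

```java
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class JsonFunctionRegistrationDemo {
  public static void main(String[] args) {
    TableEnvironment tEnv =
        TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
    // Register the UDFs under names of our choosing.
    tEnv.createTemporarySystemFunction("tojson", com.datasqrl.json.ToJson.class);
    tEnv.createTemporarySystemFunction("jsonExtract", com.datasqrl.json.JsonExtract.class);
    // Parse a JSON string, then extract a field with a string default.
    tEnv.executeSql(
            "SELECT jsonExtract(tojson('{\"key\":\"value\"}'), '$.key', 'fallback') AS extracted")
        .print();
  }
}
```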
diff --git a/sqrl-json/pom.xml b/sqrl-json/pom.xml
deleted file mode 100644
index 1e2c142..0000000
--- a/sqrl-json/pom.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.datasqrl.flink</groupId>
-    <artifactId>sqrl-flink-parent</artifactId>
-    <version>0.1-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>sqrl-json</artifactId>
-
-  <description>Json functions for flink</description>
-
-  <url>https://www.datasqrl.com/</url>
-
-  <licenses>
-    <license>
-      <name>The Apache Software License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-
-  <developers>
-    <developer>
-      <name>Daniel Henneberger</name>
-      <email>daniel@datasqrl.com</email>
-      <url>https://github.com/henneberger</url>
-    </developer>
-  </developers>
-
-  <scm>
-    <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
-    <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
-    <tag>HEAD</tag>
-    <url>https://github.com/DataSQRL/sqrl</url>
-  </scm>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-api-java-bridge</artifactId>
-      <version>1.19.0</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-common</artifactId>
-      <version>1.19.0</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-runtime</artifactId>
-      <version>1.19.0</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.jayway.jsonpath</groupId>
-      <artifactId>json-path</artifactId>
-      <version>2.9.0</version>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-lib-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-  </dependencies>
-
-</project>
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/ArrayAgg.java b/sqrl-json/src/main/java/com/datasqrl/json/ArrayAgg.java
deleted file mode 100644
index a846e8e..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/ArrayAgg.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import java.util.List;
-import lombok.Value;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.table.annotation.DataTypeHint;
-
-@Value
-public class ArrayAgg {
-
- @DataTypeHint(value = "RAW")
- private List<JsonNode> objects;
-
- public void add(JsonNode value) {
- objects.add(value);
- }
-
- public void remove(JsonNode value) {
- objects.remove(value);
- }
-}
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonArray.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonArray.java
deleted file mode 100644
index 5bd6800..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonArray.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import static com.datasqrl.json.JsonFunctions.createJsonArgumentTypeStrategy;
-import static com.datasqrl.json.JsonFunctions.createJsonType;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.InputTypeStrategies;
-import org.apache.flink.table.types.inference.InputTypeStrategy;
-import org.apache.flink.table.types.inference.TypeInference;
-import org.apache.flink.table.types.inference.TypeStrategies;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/** Creates a JSON array from the list of JSON objects and scalar values. */
-@AutoService(StandardLibraryFunction.class)
-public class JsonArray extends ScalarFunction implements StandardLibraryFunction {
- private static final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
-
- public FlinkJsonType eval(Object... objects) {
- ArrayNode arrayNode = mapper.createArrayNode();
-
- for (Object value : objects) {
- if (value instanceof FlinkJsonType) {
- FlinkJsonType type = (FlinkJsonType) value;
- arrayNode.add(type.json);
- } else {
- arrayNode.addPOJO(value);
- }
- }
-
- return new FlinkJsonType(arrayNode);
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- InputTypeStrategy inputTypeStrategy =
- InputTypeStrategies.varyingSequence(createJsonArgumentTypeStrategy(typeFactory));
-
- return TypeInference.newBuilder()
- .inputTypeStrategy(inputTypeStrategy)
- .outputTypeStrategy(TypeStrategies.explicit(createJsonType(typeFactory)))
- .build();
- }
-}
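A direct eval sketch matching the README example for JsonArray (assumes the deleted classes on the classpath):

```java
import com.datasqrl.json.FlinkJsonType;
import com.datasqrl.json.JsonArray;

public class JsonArrayDemo {
  public static void main(String[] args) {
    // Scalars are added as POJO nodes; FlinkJsonType arguments are embedded as-is.
    FlinkJsonType result = new JsonArray().eval("element1", 2, "element3");
    System.out.println(result.getJson()); // ["element1",2,"element3"]
  }
}
```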
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonArrayAgg.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonArrayAgg.java
deleted file mode 100644
index 7557645..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonArrayAgg.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.util.ArrayList;
-import lombok.SneakyThrows;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
-import org.apache.flink.table.functions.AggregateFunction;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/** Aggregation function that aggregates JSON objects into a JSON array. */
-@AutoService(StandardLibraryFunction.class)
-public class JsonArrayAgg extends AggregateFunction<FlinkJsonType, ArrayAgg>
- implements StandardLibraryFunction {
-
- private static final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
-
- @Override
- public ArrayAgg createAccumulator() {
- return new ArrayAgg(new ArrayList<>());
- }
-
- public void accumulate(ArrayAgg accumulator, String value) {
- accumulator.add(mapper.getNodeFactory().textNode(value));
- }
-
- @SneakyThrows
- public void accumulate(ArrayAgg accumulator, FlinkJsonType value) {
- if (value != null) {
- accumulator.add(value.json);
- } else {
- accumulator.add(null);
- }
- }
-
- public void accumulate(ArrayAgg accumulator, Double value) {
- accumulator.add(mapper.getNodeFactory().numberNode(value));
- }
-
- public void accumulate(ArrayAgg accumulator, Long value) {
- accumulator.add(mapper.getNodeFactory().numberNode(value));
- }
-
- public void accumulate(ArrayAgg accumulator, Integer value) {
- accumulator.add(mapper.getNodeFactory().numberNode(value));
- }
-
- public void retract(ArrayAgg accumulator, String value) {
- accumulator.remove(mapper.getNodeFactory().textNode(value));
- }
-
- @SneakyThrows
- public void retract(ArrayAgg accumulator, FlinkJsonType value) {
- if (value != null) {
- accumulator.remove(value.json);
- } else {
- accumulator.remove(null);
- }
- }
-
- public void retract(ArrayAgg accumulator, Double value) {
- accumulator.remove(mapper.getNodeFactory().numberNode(value));
- }
-
- public void retract(ArrayAgg accumulator, Long value) {
- accumulator.remove(mapper.getNodeFactory().numberNode(value));
- }
-
- public void retract(ArrayAgg accumulator, Integer value) {
- accumulator.remove(mapper.getNodeFactory().numberNode(value));
- }
-
- public void merge(ArrayAgg accumulator, java.lang.Iterable<ArrayAgg> iterable) {
- iterable.forEach(o -> accumulator.getObjects().addAll(o.getObjects()));
- }
-
- @Override
- public FlinkJsonType getValue(ArrayAgg accumulator) {
- ArrayNode arrayNode = mapper.createArrayNode();
- for (Object o : accumulator.getObjects()) {
- if (o instanceof FlinkJsonType) {
- arrayNode.add(((FlinkJsonType) o).json);
- } else {
- arrayNode.addPOJO(o);
- }
- }
- return new FlinkJsonType(arrayNode);
- }
-}
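The accumulator lifecycle in a direct-call sketch, accumulate, retract, then materialize the array:

```java
import com.datasqrl.json.ArrayAgg;
import com.datasqrl.json.JsonArrayAgg;

public class JsonArrayAggDemo {
  public static void main(String[] args) {
    JsonArrayAgg agg = new JsonArrayAgg();
    ArrayAgg acc = agg.createAccumulator();
    agg.accumulate(acc, "value1");
    agg.accumulate(acc, 2L);
    agg.retract(acc, "value1"); // retraction removes the matching JSON node again
    System.out.println(agg.getValue(acc).getJson()); // a JSON array holding only 2
  }
}
```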
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonConcat.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonConcat.java
deleted file mode 100644
index 9815b4f..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonConcat.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/**
- * Merges two JSON objects into one. If two objects share the same key, the value from the later
- * object is used.
- */
-@AutoService(StandardLibraryFunction.class)
-public class JsonConcat extends ScalarFunction implements StandardLibraryFunction {
-
- public FlinkJsonType eval(FlinkJsonType json1, FlinkJsonType json2) {
- if (json1 == null || json2 == null) {
- return null;
- }
- try {
- ObjectNode node1 = (ObjectNode) json1.getJson();
- ObjectNode node2 = (ObjectNode) json2.getJson();
-
- node1.setAll(node2);
- return new FlinkJsonType(node1);
- } catch (Exception e) {
- return null;
- }
- }
-}
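A collision-behavior sketch: node1.setAll(node2) means the second object's value wins on a shared key.

```java
import com.datasqrl.json.FlinkJsonType;
import com.datasqrl.json.JsonConcat;
import com.datasqrl.json.ToJson;

public class JsonConcatDemo {
  public static void main(String[] args) {
    ToJson toJson = new ToJson();
    FlinkJsonType a = toJson.eval("{\"key\":1,\"shared\":\"a\"}");
    FlinkJsonType b = toJson.eval("{\"shared\":\"b\"}");
    System.out.println(new JsonConcat().eval(a, b).getJson()); // {"key":1,"shared":"b"}
  }
}
```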
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonExists.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonExists.java
deleted file mode 100644
index f375ab4..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonExists.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.runtime.functions.SqlJsonUtils;
-
-/** For a given JSON object, checks whether the provided JSON path exists */
-@AutoService(StandardLibraryFunction.class)
-public class JsonExists extends ScalarFunction implements StandardLibraryFunction {
-
- public Boolean eval(FlinkJsonType json, String path) {
- if (json == null) {
- return null;
- }
- try {
- return SqlJsonUtils.jsonExists(json.json.toString(), path);
- } catch (Exception e) {
- return false;
- }
- }
-}
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonExtract.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonExtract.java
deleted file mode 100644
index 1c7dcf8..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonExtract.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.ReadContext;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/**
- * Extracts a value from the JSON object based on the provided JSON path. An optional third argument
- * can be provided to specify a default value when the given JSON path does not yield a value for
- * the JSON object.
- */
-@AutoService(StandardLibraryFunction.class)
-public class JsonExtract extends ScalarFunction implements StandardLibraryFunction {
-
- public String eval(FlinkJsonType input, String pathSpec) {
- if (input == null) {
- return null;
- }
- try {
- JsonNode jsonNode = input.getJson();
- ReadContext ctx = JsonPath.parse(jsonNode.toString());
- Object value = ctx.read(pathSpec);
- if (value == null) {
- return null;
- }
- return value.toString();
- } catch (Exception e) {
- return null;
- }
- }
-
- public String eval(FlinkJsonType input, String pathSpec, String defaultValue) {
- if (input == null) {
- return null;
- }
- try {
- ReadContext ctx = JsonPath.parse(input.getJson().toString());
- JsonPath parse = JsonPath.compile(pathSpec);
- return ctx.read(parse, String.class);
- } catch (Exception e) {
- return defaultValue;
- }
- }
-
- public Boolean eval(FlinkJsonType input, String pathSpec, Boolean defaultValue) {
- if (input == null) {
- return null;
- }
- try {
- ReadContext ctx = JsonPath.parse(input.getJson().toString());
- JsonPath parse = JsonPath.compile(pathSpec);
- return ctx.read(parse, Boolean.class);
- } catch (Exception e) {
- return defaultValue;
- }
- }
-
- public Double eval(FlinkJsonType input, String pathSpec, Double defaultValue) {
- if (input == null) {
- return null;
- }
- try {
- ReadContext ctx = JsonPath.parse(input.getJson().toString());
- JsonPath parse = JsonPath.compile(pathSpec);
- return ctx.read(parse, Double.class);
- } catch (Exception e) {
- return defaultValue;
- }
- }
-
- public Integer eval(FlinkJsonType input, String pathSpec, Integer defaultValue) {
- if (input == null) {
- return null;
- }
- try {
- ReadContext ctx = JsonPath.parse(input.getJson().toString());
- JsonPath parse = JsonPath.compile(pathSpec);
- return ctx.read(parse, Integer.class);
- } catch (Exception e) {
- return defaultValue;
- }
- }
-}
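A sketch of the typed-default overloads: the default both selects the overload (and thus the result type) and is returned when the path does not match.

```java
import com.datasqrl.json.FlinkJsonType;
import com.datasqrl.json.JsonExtract;
import com.datasqrl.json.ToJson;

public class JsonExtractDemo {
  public static void main(String[] args) {
    FlinkJsonType json = new ToJson().eval("{\"path\":true}");
    JsonExtract extract = new JsonExtract();
    System.out.println(extract.eval(json, "$.path", false));   // true (Boolean overload)
    System.out.println(extract.eval(json, "$.missing", 10.5)); // 10.5 (default on a miss)
  }
}
```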
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonFunctions.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonFunctions.java
deleted file mode 100644
index 623307f..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonFunctions.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.inference.ArgumentTypeStrategy;
-import org.apache.flink.table.types.inference.InputTypeStrategies;
-import org.apache.flink.table.types.inference.strategies.SpecificInputTypeStrategies;
-
-public class JsonFunctions {
-
- public static final ToJson TO_JSON = new ToJson();
- public static final JsonToString JSON_TO_STRING = new JsonToString();
- public static final JsonObject JSON_OBJECT = new JsonObject();
- public static final JsonArray JSON_ARRAY = new JsonArray();
- public static final JsonExtract JSON_EXTRACT = new JsonExtract();
- public static final JsonQuery JSON_QUERY = new JsonQuery();
- public static final JsonExists JSON_EXISTS = new JsonExists();
- public static final JsonArrayAgg JSON_ARRAYAGG = new JsonArrayAgg();
- public static final JsonObjectAgg JSON_OBJECTAGG = new JsonObjectAgg();
- public static final JsonConcat JSON_CONCAT = new JsonConcat();
-
- public static ArgumentTypeStrategy createJsonArgumentTypeStrategy(DataTypeFactory typeFactory) {
- return InputTypeStrategies.or(
- SpecificInputTypeStrategies.JSON_ARGUMENT,
- InputTypeStrategies.explicit(createJsonType(typeFactory)));
- }
-
- public static DataType createJsonType(DataTypeFactory typeFactory) {
- DataType dataType = DataTypes.of(FlinkJsonType.class).toDataType(typeFactory);
- return dataType;
- }
-}
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonObject.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonObject.java
deleted file mode 100644
index 5230d55..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonObject.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import static com.datasqrl.json.JsonFunctions.createJsonArgumentTypeStrategy;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.InputTypeStrategies;
-import org.apache.flink.table.types.inference.InputTypeStrategy;
-import org.apache.flink.table.types.inference.TypeInference;
-import org.apache.flink.table.types.inference.TypeStrategies;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/**
- * Creates a JSON object from key-value pairs, where the key is mapped to a field with the
- * associated value. Key-value pairs are provided as a list of even length, with the first element
- * of each pair being the key and the second being the value. If multiple key-value pairs have the
- * same key, the last pair is added to the JSON object.
- */
-@AutoService(StandardLibraryFunction.class)
-public class JsonObject extends ScalarFunction implements StandardLibraryFunction {
- static final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
-
- public FlinkJsonType eval(Object... objects) {
- if (objects.length % 2 != 0) {
- throw new IllegalArgumentException("Arguments should be in key-value pairs");
- }
-
- ObjectNode objectNode = mapper.createObjectNode();
-
- for (int i = 0; i < objects.length; i += 2) {
- if (!(objects[i] instanceof String)) {
- throw new IllegalArgumentException("Key must be a string");
- }
- String key = (String) objects[i];
- Object value = objects[i + 1];
- if (value instanceof FlinkJsonType) {
- FlinkJsonType type = (FlinkJsonType) value;
- objectNode.put(key, type.json);
- } else {
- objectNode.putPOJO(key, value);
- }
- }
-
- return new FlinkJsonType(objectNode);
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- InputTypeStrategy anyJsonCompatibleArg =
- InputTypeStrategies.repeatingSequence(createJsonArgumentTypeStrategy(typeFactory));
-
- InputTypeStrategy inputTypeStrategy =
- InputTypeStrategies.compositeSequence().finishWithVarying(anyJsonCompatibleArg);
-
- return TypeInference.newBuilder()
- .inputTypeStrategy(inputTypeStrategy)
- .outputTypeStrategy(
- TypeStrategies.explicit(DataTypes.of(FlinkJsonType.class).toDataType(typeFactory)))
- .build();
- }
-}
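A key-value pair sketch; an odd argument count or a non-string key throws IllegalArgumentException, per the checks above.

```java
import com.datasqrl.json.FlinkJsonType;
import com.datasqrl.json.JsonObject;

public class JsonObjectDemo {
  public static void main(String[] args) {
    FlinkJsonType obj = new JsonObject().eval("key1", "value1", "key2", 2);
    System.out.println(obj.getJson()); // {"key1":"value1","key2":2}
  }
}
```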
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonObjectAgg.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonObjectAgg.java
deleted file mode 100644
index 23ea782..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonObjectAgg.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.util.LinkedHashMap;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.annotation.DataTypeHint;
-import org.apache.flink.table.annotation.FunctionHint;
-import org.apache.flink.table.annotation.InputGroup;
-import org.apache.flink.table.functions.AggregateFunction;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/**
- * Aggregation function that merges JSON objects into a single JSON object. If two JSON objects
- * share the same field name, the value of the later one is used in the aggregated result.
- */
-@FunctionHint(
- output =
- @DataTypeHint(
- value = "RAW",
- bridgedTo = FlinkJsonType.class,
- rawSerializer = FlinkJsonTypeSerializer.class))
-@AutoService(StandardLibraryFunction.class)
-public class JsonObjectAgg extends AggregateFunction<FlinkJsonType, ObjectAgg>
- implements StandardLibraryFunction {
-
- private static final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
-
- @Override
- public ObjectAgg createAccumulator() {
- return new ObjectAgg(new LinkedHashMap<>());
- }
-
- public void accumulate(ObjectAgg accumulator, String key, String value) {
- accumulateObject(accumulator, key, value);
- }
-
- public void accumulate(
- ObjectAgg accumulator, String key, @DataTypeHint(inputGroup = InputGroup.ANY) Object value) {
- if (value instanceof FlinkJsonType) {
- accumulateObject(accumulator, key, ((FlinkJsonType) value).getJson());
- } else {
- accumulator.add(key, mapper.getNodeFactory().pojoNode(value));
- }
- }
-
- public void accumulate(ObjectAgg accumulator, String key, Double value) {
- accumulateObject(accumulator, key, value);
- }
-
- public void accumulate(ObjectAgg accumulator, String key, Long value) {
- accumulateObject(accumulator, key, value);
- }
-
- public void accumulate(ObjectAgg accumulator, String key, Integer value) {
- accumulateObject(accumulator, key, value);
- }
-
- public void accumulateObject(ObjectAgg accumulator, String key, Object value) {
- accumulator.add(key, mapper.getNodeFactory().pojoNode(value));
- }
-
- public void retract(ObjectAgg accumulator, String key, String value) {
- retractObject(accumulator, key);
- }
-
- public void retract(
- ObjectAgg accumulator, String key, @DataTypeHint(inputGroup = InputGroup.ANY) Object value) {
- retractObject(accumulator, key);
- }
-
- public void retract(ObjectAgg accumulator, String key, Double value) {
- retractObject(accumulator, key);
- }
-
- public void retract(ObjectAgg accumulator, String key, Long value) {
- retractObject(accumulator, key);
- }
-
- public void retract(ObjectAgg accumulator, String key, Integer value) {
- retractObject(accumulator, key);
- }
-
- public void retractObject(ObjectAgg accumulator, String key) {
- accumulator.remove(key);
- }
-
- public void merge(ObjectAgg accumulator, java.lang.Iterable<ObjectAgg> iterable) {
- iterable.forEach(o -> accumulator.getObjects().putAll(o.getObjects()));
- }
-
- @Override
- public FlinkJsonType getValue(ObjectAgg accumulator) {
- ObjectNode objectNode = mapper.createObjectNode();
- accumulator.getObjects().forEach(objectNode::putPOJO);
- return new FlinkJsonType(objectNode);
- }
-}
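An accumulator sketch showing the last-writer-wins semantics the javadoc describes:

```java
import com.datasqrl.json.JsonObjectAgg;
import com.datasqrl.json.ObjectAgg;

public class JsonObjectAggDemo {
  public static void main(String[] args) {
    JsonObjectAgg agg = new JsonObjectAgg();
    ObjectAgg acc = agg.createAccumulator();
    agg.accumulate(acc, "key1", "value1");
    agg.accumulate(acc, "key1", "value2"); // same key: the later value wins
    agg.accumulate(acc, "key2", 3);
    System.out.println(agg.getValue(acc).getJson()); // {"key1":"value2","key2":3}
  }
}
```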
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonQuery.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonQuery.java
deleted file mode 100644
index 866ba03..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonQuery.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import com.jayway.jsonpath.JsonPath;
-import com.jayway.jsonpath.ReadContext;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/**
- * For a given JSON object, executes a JSON path query against the object and returns the result
- * as a JSON string.
- */
-@AutoService(StandardLibraryFunction.class)
-public class JsonQuery extends ScalarFunction implements StandardLibraryFunction {
- static final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
-
- public String eval(FlinkJsonType input, String pathSpec) {
- if (input == null) {
- return null;
- }
- try {
- JsonNode jsonNode = input.getJson();
- ReadContext ctx = JsonPath.parse(jsonNode.toString());
- Object result = ctx.read(pathSpec);
- return mapper.writeValueAsString(result); // Convert the result back to JSON string
- } catch (Exception e) {
- return null;
- }
- }
-}
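Unlike JsonExtract, JsonQuery re-serializes the match through the mapper, so structured results survive as JSON text:

```java
import com.datasqrl.json.FlinkJsonType;
import com.datasqrl.json.JsonQuery;
import com.datasqrl.json.ToJson;

public class JsonQueryDemo {
  public static void main(String[] args) {
    FlinkJsonType json = new ToJson().eval("{\"a\":[1,2,3]}");
    System.out.println(new JsonQuery().eval(json, "$.a")); // [1,2,3]
  }
}
```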
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/JsonToString.java b/sqrl-json/src/main/java/com/datasqrl/json/JsonToString.java
deleted file mode 100644
index 4fc8a88..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/JsonToString.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.SqrlCastFunction;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.ScalarFunction;
-
-@AutoService(StandardLibraryFunction.class)
-public class JsonToString extends ScalarFunction
- implements StandardLibraryFunction, SqrlCastFunction {
-
- public String eval(FlinkJsonType json) {
- if (json == null) {
- return null;
- }
- return json.getJson().toString();
- }
-}
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/ObjectAgg.java b/sqrl-json/src/main/java/com/datasqrl/json/ObjectAgg.java
deleted file mode 100644
index 72eb4dc..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/ObjectAgg.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import java.util.Map;
-import lombok.Getter;
-import lombok.Value;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.table.annotation.DataTypeHint;
-
-@Value
-public class ObjectAgg {
-
- @DataTypeHint(value = "RAW")
- @Getter
- Map<String, JsonNode> objects;
-
- public void add(String key, JsonNode value) {
- if (key != null) {
- objects.put(key, value);
- }
- }
-
- public void remove(String key) {
- if (key != null) {
- objects.remove(key);
- }
- }
-}
diff --git a/sqrl-json/src/main/java/com/datasqrl/json/ToJson.java b/sqrl-json/src/main/java/com/datasqrl/json/ToJson.java
deleted file mode 100644
index 899e3d7..0000000
--- a/sqrl-json/src/main/java/com/datasqrl/json/ToJson.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import com.datasqrl.function.SqrlCastFunction;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.flink.table.annotation.DataTypeHint;
-import org.apache.flink.table.annotation.InputGroup;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.types.Row;
-import org.apache.flink.util.jackson.JacksonMapperFactory;
-
-/** Parses a JSON object from a string, or unboxes Flink rows and row arrays into JSON. */
-@AutoService(StandardLibraryFunction.class)
-public class ToJson extends ScalarFunction implements StandardLibraryFunction, SqrlCastFunction {
-
- public static final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
-
- public FlinkJsonType eval(String json) {
- if (json == null) {
- return null;
- }
- try {
- return new FlinkJsonType(mapper.readTree(json));
- } catch (JsonProcessingException e) {
- return null;
- }
- }
-
- public FlinkJsonType eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object json) {
- if (json == null) {
- return null;
- }
- if (json instanceof FlinkJsonType) {
- return (FlinkJsonType) json;
- }
-
- return new FlinkJsonType(unboxFlinkToJsonNode(json));
- }
-
- JsonNode unboxFlinkToJsonNode(Object json) {
- if (json instanceof Row) {
- Row row = (Row) json;
- ObjectNode objectNode = mapper.createObjectNode();
- String[] fieldNames =
- row.getFieldNames(true).toArray(new String[0]); // Get field names in an array
- for (String fieldName : fieldNames) {
- Object field = row.getField(fieldName);
- objectNode.set(fieldName, unboxFlinkToJsonNode(field)); // Recursively unbox each field
- }
- return objectNode;
- } else if (json instanceof Row[]) {
- Row[] rows = (Row[]) json;
- ArrayNode arrayNode = mapper.createArrayNode();
- for (Row row : rows) {
- if (row == null) {
- arrayNode.addNull();
- } else {
- arrayNode.add(unboxFlinkToJsonNode(row)); // Recursively unbox each row in the array
- }
- }
- return arrayNode;
- }
- return mapper.valueToTree(json); // Directly serialize other types
- }
-}
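An unboxing sketch: a named Row becomes a JSON object, field by field, via the recursive path above.

```java
import com.datasqrl.json.ToJson;
import org.apache.flink.types.Row;

public class ToJsonDemo {
  public static void main(String[] args) {
    Row row = Row.withNames();
    row.setField("key", "value");
    row.setField("count", 2);
    System.out.println(new ToJson().eval(row).getJson()); // {"key":"value","count":2}
  }
}
```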
diff --git a/sqrl-json/src/test/java/com/datasqrl/json/JsonConversionTest.java b/sqrl-json/src/test/java/com/datasqrl/json/JsonConversionTest.java
deleted file mode 100644
index 2ea7fb5..0000000
--- a/sqrl-json/src/test/java/com/datasqrl/json/JsonConversionTest.java
+++ /dev/null
@@ -1,490 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// package com.datasqrl.json;
-//
-// import static com.datasqrl.function.SqrlFunction.getFunctionNameFromClass;
-// import static com.datasqrl.plan.local.analyze.RetailSqrlModule.createTableSource;
-// import static org.junit.jupiter.api.Assertions.assertEquals;
-//
-// import com.datasqrl.calcite.Dialect;
-// import com.datasqrl.calcite.function.SqrlTableMacro;
-// import com.datasqrl.calcite.type.TypeFactory;
-// import com.datasqrl.canonicalizer.Name;
-// import com.datasqrl.canonicalizer.NameCanonicalizer;
-// import com.datasqrl.canonicalizer.NamePath;
-// import com.datasqrl.config.SourceFactory;
-// import com.datasqrl.engine.database.relational.ddl.PostgresDDLFactory;
-// import com.datasqrl.engine.database.relational.ddl.statements.CreateTableDDL;
-// import com.datasqrl.error.ErrorCollector;
-// import com.datasqrl.function.SqrlFunction;
-// import com.datasqrl.functions.json.StdJsonLibraryImpl;
-// import com.datasqrl.graphql.AbstractGraphqlTest;
-// import com.datasqrl.io.DataSystemConnectorFactory;
-// import com.datasqrl.io.InMemSourceFactory;
-// import com.datasqrl.io.mem.MemoryConnectorFactory;
-// import com.datasqrl.json.FlinkJsonType;
-// import com.datasqrl.loaders.TableSourceNamespaceObject;
-// import com.datasqrl.module.NamespaceObject;
-// import com.datasqrl.module.SqrlModule;
-// import com.datasqrl.plan.global.PhysicalDAGPlan.EngineSink;
-// import com.datasqrl.plan.local.analyze.MockModuleLoader;
-// import com.datasqrl.plan.table.CalciteTableFactory;
-// import com.datasqrl.plan.table.TableConverter;
-// import com.datasqrl.plan.table.TableIdFactory;
-// import com.datasqrl.plan.validate.ScriptPlanner;
-// import com.datasqrl.util.SnapshotTest;
-// import com.google.auto.service.AutoService;
-// import com.ibm.icu.impl.Pair;
-// import java.io.IOException;
-// import java.sql.Connection;
-// import java.sql.ResultSet;
-// import java.sql.Statement;
-// import java.util.*;
-//
-// import lombok.SneakyThrows;
-// import lombok.Value;
-// import lombok.extern.slf4j.Slf4j;
-// import org.apache.calcite.rel.RelNode;
-// import org.apache.calcite.sql.ScriptNode;
-// import org.apache.calcite.sql.SqrlStatement;
-// import org.apache.flink.api.common.typeinfo.Types;
-// import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-// import org.apache.flink.table.api.Table;
-// import org.apache.flink.table.api.TableResult;
-// import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-// import org.apache.flink.table.functions.FunctionDefinition;
-// import org.apache.flink.table.functions.UserDefinedFunction;
-// import org.apache.flink.test.junit5.MiniClusterExtension;
-// import org.apache.flink.types.Row;
-// import org.junit.jupiter.api.AfterEach;
-// import org.junit.jupiter.api.Assertions;
-// import org.junit.jupiter.api.BeforeAll;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.junit.jupiter.api.TestInfo;
-// import org.junit.jupiter.api.extension.ExtendWith;
-// import org.postgresql.util.PGobject;
-// import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper;
-//
-// /**
-//  * A test suite to convert SQRL queries to their respective dialects
-//  */
-// @Slf4j
-// @ExtendWith(MiniClusterExtension.class)
-// public class JsonConversionTest extends AbstractGraphqlTest {
-//
-// protected SnapshotTest.Snapshot snapshot;
-// ObjectMapper objectMapper = new ObjectMapper();
-// private ScriptPlanner planner;
-//
-// @BeforeAll
-// public static void setupAll() {
-// createPostgresTable();
-// insertDataIntoPostgresTable();
-// }
-//
-// @SneakyThrows
-// private static void createPostgresTable() {
-// try (Connection conn = AbstractGraphqlTest.getPostgresConnection(); Statement stmt =
-// conn.createStatement()) {
-// String createTableSQL =
-// "CREATE TABLE IF NOT EXISTS jsondata$2 (" + "id INT, " + "json jsonb);";
-// stmt.execute(createTableSQL);
-// }
-// }
-//
-// @SneakyThrows
-// private static void insertDataIntoPostgresTable() {
-// try (Connection conn = AbstractGraphqlTest.getPostgresConnection(); Statement stmt =
-// conn.createStatement()) {
-// String insertSQL = "INSERT INTO jsondata$2 (id, json) VALUES "
-// + "(1, '{\"example\":[1,2,3]}'),(2, '{\"example\":[4,5,6]}');";
-// stmt.execute(insertSQL);
-// }
-// }
-////
-//// @AfterAll
-//// public static void tearDownAll() {
-////// testDatabase.stop();
-//// }
-//
-// @BeforeEach
-// public void setup(TestInfo testInfo) throws IOException {
-// initialize(IntegrationTestSettings.getInMemory(), null, Optional.empty(),
-// ErrorCollector.root(),
-// createJson(), false);
-//
-// this.snapshot = SnapshotTest.Snapshot.of(getClass(), testInfo);
-//
-// this.planner = injector.getInstance(ScriptPlanner.class);
-// runStatement("IMPORT json-data.jsondata TIMESTAMP _ingest_time");
-// }
-//
-// private void runStatement(String statement) {
-// planner.validateStatement(parse(statement));
-// }
-//
-// private SqrlStatement parse(String statement) {
-// return (SqrlStatement) ((ScriptNode)framework.getQueryPlanner().parse(Dialect.SQRL,
-// statement))
-// .getStatements().get(0);
-// }
-//
-// public Map<NamePath, SqrlModule> createJson() {
-// CalciteTableFactory tableFactory = new CalciteTableFactory(new TableIdFactory(new
-// HashMap<>()),
-// new TableConverter(new TypeFactory(), framework));
-// SqrlModule module = new SqrlModule() {
-//
-// private final Map<Name, NamespaceObject> tables = new HashMap<>();
-//
-// @Override
-// public Optional<NamespaceObject> getNamespaceObject(Name name) {
-// NamespaceObject obj = new TableSourceNamespaceObject(
-// RetailSqrlModule.createTableSource(JsonData.class, "data", "json-data"),
-// tableFactory);
-// return Optional.of(obj);
-// }
-//
-// @Override
-// public List<NamespaceObject> getNamespaceObjects() {
-// return new ArrayList<>(tables.values());
-// }
-// };
-//
-// return Map.of(NamePath.of("json-data"), module);
-// }
-//
-// @SneakyThrows
-// private Object executePostgresQuery(String query) {
-// try (Connection conn = AbstractGraphqlTest.getPostgresConnection(); Statement stmt =
-// conn.createStatement()) {
-// System.out.println(query);
-// ResultSet rs = stmt.executeQuery(query);
-// // Assuming the result is a single value for simplicity
-// return rs.next() ? rs.getObject(1) : null;
-// }
-// }
-//
-// @SneakyThrows
-// public Object jsonFunctionTest(String query) {
-// StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-// env.setParallelism(1);
-//
-// // Assuming you have a method to create or get Flink SQL environment
-// StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
-//
-// List<Row> inputRows = Arrays.asList(Row.of(1, "{\"example\":[1,2,3]}"),
-// Row.of(2, "{\"example\":[4,5,6]}"));
-//
-// // Create a Table from the list of rows
-// Table inputTable = tableEnv.fromDataStream(env.fromCollection(inputRows,
-// Types.ROW_NAMED(new String[]{"id", "json"}, Types.INT, Types.STRING)));
-//
-// for (FunctionDefinition sqrlFunction : StdJsonLibraryImpl.json) {
-// UserDefinedFunction userDefinedFunction = (UserDefinedFunction) sqrlFunction;
-// tableEnv.createFunction(getFunctionNameFromClass(sqrlFunction.getClass()),
-// userDefinedFunction.getClass());
-// }
-//
-// // Register the Table under a name
-// tableEnv.createTemporaryView("jsondata$2", inputTable);
-//
-// // Run your query
-// Table result = tableEnv.sqlQuery(query);
-// TableResult execute = result.execute();
-// List<Row> rows = new ArrayList<>();
-// execute.collect().forEachRemaining(rows::add);
-//
-// return rows.get(rows.size() - 1).getField(0);
-// }
-//
-// @AfterEach
-// public void tearDown() {
-// snapshot.createOrValidate();
-// }
-//
-// @Test
-// public void jsonArrayTest() {
-// testJsonReturn("jsonArray('a', null, 'b', 123)");
-// }
-//
-// @Test
-// public void jsonArrayAgg() {
-// testJsonReturn("jsonArrayAgg(jsonExtract(toJson(json), '$.example[0]', 0))");
-// }
-//
-// @Test
-// public void jsonObjectAgg() {
-// testJsonReturn("jsonObjectAgg('key', toJson(json))");
-// }
-//
-// @Test
-// public void jsonArrayAgg2() {
-// testJsonReturn("jsonArrayAgg(id)");
-// }
-//
-// @Test
-// public void jsonArrayAggNull() {
-// testJsonReturn("jsonArrayAgg(toJson(null))");
-// }
-//
-// @Test
-// public void jsonArrayArray() {
-// testJsonReturn("JSONARRAY(JSONARRAY(1))");
-// }
-//
-// @Test
-// public void jsonExistsTest() {
-// testScalarReturn("jsonExists(toJson('{\"a\": true}'), '$.a')");
-// }
-//
-// @Test
-// public void jsonExtractTest() {
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.a', 'default')");
-// }
-//
-// @Test
-// public void jsonConcat() {
-// testJsonReturn("jsonConcat(toJson('{\"a\": \"hello\"}'), toJson('{\"b\": \"hello\"}'))");
-// }
-//
-// @Test
-// public void jsonObjectTest() {
-// testJsonReturn("jsonObject('key1', 'value1', 'key2', 123)");
-// }
-//
-// @Test
-// public void jsonQueryTest() {
-// testJsonReturn("jsonQuery(toJson('{\"a\": {\"b\": 1}}'), '$.a')");
-// }
-//
-// @Test
-// public void jsonArrayWithNulls() {
-// // Testing JSON array creation with null values
-// testJsonReturn("jsonArray('a', null, 'b', null)");
-// }
-//
-// @Test
-// public void jsonObjectWithNulls() {
-// // Testing JSON object creation with null values
-// testJsonReturn("jsonObject('key1', null, 'key2', 'value2')");
-// }
-//
-// @Test
-// public void jsonExtract() {
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.a')");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultString() {
-// // Test with a default string value
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.b', 'defaultString')");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultInteger() {
-// // Test with a default integer value
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.b', 123)");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultBoolean() {
-// // Test with a default boolean value
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.b', true)");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultBoolean2() {
-// // Test with a default boolean value
-// testScalarReturn("jsonExtract(toJson('{\"a\": false}'), '$.a', true)");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultDouble3() {
-// // Test with a default boolean value
-// testScalarReturn("jsonExtract(toJson('{\"a\": 0.2}'), '$.a', 0.0)");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultDouble4() {
-// // Test with a default boolean value
-// testScalarReturn("jsonExtract(toJson('{\"a\": 0.2}'), '$.a', 0)");
-// }
-//
-// @Test
-// public void jsonExtractWithDefaultNull() {
-// // Test with a default null value
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.b', null)");
-// }
-//
-// @Test
-// public void jsonExtractWithNonexistentPath() {
-// // Test extraction from a nonexistent path (should return default value)
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.nonexistent', 'default')");
-// }
-//
-// @Test
-// public void jsonExtractWithEmptyJson() {
-// // Test extraction from an empty JSON object
-// testScalarReturn("jsonExtract(toJson('{}'), '$.a', 'default')");
-// }
-//
-// @Test
-// public void jsonExtractWithComplexJsonPath() {
-// // Test extraction with a complex JSON path
-// testScalarReturn(
-// "jsonExtract(toJson('{\"a\": {\"b\": {\"c\": \"value\"}}}'), '$.a.b.c', 'default')");
-// }
-//
-// @Test
-// public void jsonExtractWithArrayPath() {
-// // Test extraction where the path leads to an array
-// testScalarReturn("jsonExtract(toJson('{\"a\": [1, 2, 3]}'), '$.a[1]', 'default')");
-// }
-//
-// @Test
-// public void jsonExtractWithNumericDefault() {
-// // Test extraction with a numeric default value
-// testScalarReturn("jsonExtract(toJson('{\"a\": \"hello\"}'), '$.b', 0)");
-// }
-//
-// @Test
-// public void jsonObject() {
-// // Test extraction with a numeric default value
-// testJsonReturn("jsonObject('key', toJson('{\"a\": \"hello\"}'), 'key2', 0)");
-// }
-//
-// @Test
-// public void jsonArrayWithMixedDataTypes() {
-// // Testing JSON array creation with mixed data types
-// testJsonReturn("jsonArray('a', 1, true, null, 3.14)");
-// }
-//
-// @Test
-// public void jsonArrayWithNestedArrays() {
-// // Testing JSON array creation with nested arrays
-// testJsonReturn("jsonArray('a', jsonArray('nested', 1), 'b', jsonArray('nested', 2))");
-// }
-//
-// @Test
-// public void jsonArrayWithEmptyValues() {
-// // Testing JSON array creation with empty values
-// testJsonReturn("jsonArray('', '', '', '')");
-// }
-//
-// @Test
-// public void jsonObjectWithMixedDataTypes() {
-// // Testing JSON object creation with mixed data types
-// testJsonReturn("jsonObject('string', 'text', 'number', 123, 'boolean', true)");
-// }
-//
-// @Test
-// public void jsonObjectWithNestedObjects() {
-// // Testing JSON object creation with nested objects
-// testJsonReturn("jsonObject('key1', jsonObject('nestedKey', 'nestedValue'), 'key2',
-// 'value2')");
-// }
-//
-// @Test
-// public void jsonObjectWithEmptyKeys() {
-// // Testing JSON object creation with empty keys
-// testJsonReturn("jsonObject('', 'value1', '', 'value2')");
-// }
-//
-// @SneakyThrows
-// private void testJsonReturn(String function) {
-// Pair<Object, Object> x = executeScript(function);
-// Assertions.assertEquals(objectMapper.readTree((String) x.first),
-// objectMapper.readTree((String) x.second));
-// }
-//
-// @SneakyThrows
-// private void testScalarReturn(String function) {
-// Pair<Object, Object> x = executeScript(function);
-// assertEquals(x.first.toString().trim(), x.second.toString().trim());
-// }
-//
-// public Pair<Object, Object> executeScript(String fncName) {
-// runStatement("IMPORT json.*");
-// runStatement("X(@a: Int) := SELECT " + fncName + " AS json FROM jsondata");
-// return convert("X");
-// }
-//
-// @SneakyThrows
-// private Pair<Object, Object> convert(String fncName) {
-// SqrlTableMacro x = framework.getQueryPlanner().getSchema().getTableFunction(fncName);
-// RelNode relNode = x.getViewTransform().get();
-//
-// RelNode pgRelNode = framework.getQueryPlanner().convertRelToDialect(Dialect.POSTGRES,
-// relNode);
-// String pgQuery = framework.getQueryPlanner().relToString(Dialect.POSTGRES,
-// pgRelNode).getSql();
-// snapshot.addContent(pgQuery, "postgres");
-//
-// // Execute Postgres query
-// Object pgResult = executePostgresQuery(pgQuery);
-// //Unbox result
-// pgResult = pgResult instanceof PGobject ? ((PGobject) pgResult).getValue() : pgResult;
-// pgResult = pgResult == null ? "" : pgResult.toString();
-//
-// CreateTableDDL pg = new PostgresDDLFactory().createTable(
-// new EngineSink("pg", new int[]{0}, relNode.getRowType(), OptionalInt.of(0), null));
-//
-// snapshot.addContent((String) pgResult, "Postgres Result");
-//
-// RelNode flinkRelNode = framework.getQueryPlanner().convertRelToDialect(Dialect.FLINK,
-// relNode);
-// String query = framework.getQueryPlanner().relToString(Dialect.FLINK, flinkRelNode).getSql();
-// snapshot.addContent(query, "flink");
-//
-// Object flinkResult = jsonFunctionTest(query);
-// if (flinkResult instanceof FlinkJsonType) {
-// flinkResult = ((FlinkJsonType) flinkResult).getJson();
-// }
-// flinkResult = flinkResult == null ? "" : flinkResult.toString();
-// snapshot.addContent((String) flinkResult, "Flink Result");
-// return Pair.of(pgResult, flinkResult);
-// }
-//
-// //todo: Hacky way to get different in-mem sources to load
-// @AutoService(SourceFactory.class)
-// public static class InMemJson extends InMemSourceFactory {
-//
-// static Map<String, List<?>> tableData = Map.of("data",
-// List.of(new JsonData(1, "{\"example\":[1,2,3]}"),
-// new JsonData(2, "{\"example\":[4,5,6]}")));
-//
-// public InMemJson() {
-// super("data", tableData);
-// }
-// }
-//
-// @Value
-// public static class JsonData {
-//
-// int id;
-// String json;
-// }
-//
-// @AutoService(DataSystemConnectorFactory.class)
-// public static class InMemJsonConnector extends MemoryConnectorFactory {
-//
-// public InMemJsonConnector() {
-// super("data");
-// }
-// }
-// }
diff --git a/sqrl-json/src/test/java/com/datasqrl/json/JsonFunctionsTest.java b/sqrl-json/src/test/java/com/datasqrl/json/JsonFunctionsTest.java
deleted file mode 100644
index 32fe343..0000000
--- a/sqrl-json/src/test/java/com/datasqrl/json/JsonFunctionsTest.java
+++ /dev/null
@@ -1,513 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-import lombok.SneakyThrows;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.flink.types.Row;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-
-class JsonFunctionsTest {
- ObjectMapper mapper = new ObjectMapper();
-
- @SneakyThrows
- JsonNode readTree(String val) {
- return mapper.readTree(val);
- }
-
- @Nested
- class ToJsonTest {
-
- @Test
- void testUnicodeJson() {
- Row row = Row.withNames();
- row.setField("key", "”value”");
- Row[] rows = new Row[] {row};
- FlinkJsonType result = JsonFunctions.TO_JSON.eval(rows);
- assertNotNull(result);
- assertEquals("[{\"key\":\"”value”\"}]", result.getJson().toString());
- }
-
- @Test
- void testValidJson() {
- String json = "{\"key\":\"value\"}";
- FlinkJsonType result = JsonFunctions.TO_JSON.eval(json);
- assertNotNull(result);
- assertEquals(json, result.getJson().toString());
- }
-
- @Test
- void testInvalidJson() {
- String json = "Not a JSON";
- FlinkJsonType result = JsonFunctions.TO_JSON.eval(json);
- assertNull(result);
- }
-
- @Test
- void testNullInput() {
- assertNull(JsonFunctions.TO_JSON.eval(null));
- }
- }
-
- @Nested
- class JsonToStringTest {
-
- @Test
- void testNonNullJson() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- String result = JsonFunctions.JSON_TO_STRING.eval(json);
- assertEquals("{\"key\":\"value\"}", result);
- }
-
- @Test
- void testNullJson() {
- String result = JsonFunctions.JSON_TO_STRING.eval(null);
- assertNull(result);
- }
- }
-
- @Nested
- class JsonObjectTest {
-
- @Test
- void testValidKeyValuePairs() {
- FlinkJsonType result = JsonFunctions.JSON_OBJECT.eval("key1", "value1", "key2", "value2");
- assertNotNull(result);
- assertEquals("{\"key1\":\"value1\",\"key2\":\"value2\"}", result.getJson().toString());
- }
-
- @Test
- void testInvalidNumberOfArguments() {
- assertThrows(
- IllegalArgumentException.class,
- () -> JsonFunctions.JSON_OBJECT.eval("key1", "value1", "key2"));
- }
-
- @Test
- void testNullKeyOrValue() {
- FlinkJsonType resultWithNullValue = JsonFunctions.JSON_OBJECT.eval("key1", null);
- assertNotNull(resultWithNullValue);
- assertEquals("{\"key1\":null}", resultWithNullValue.getJson().toString());
- }
- }
-
- @Nested
- class JsonArrayTest {
-
- @Test
- void testArrayWithJsonObjects() {
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key1\": \"value1\"}"));
- FlinkJsonType json2 = new FlinkJsonType(readTree("{\"key2\": \"value2\"}"));
- FlinkJsonType result = JsonFunctions.JSON_ARRAY.eval(json1, json2);
- assertNotNull(result);
- assertEquals("[{\"key1\":\"value1\"},{\"key2\":\"value2\"}]", result.getJson().toString());
- }
-
- @Test
- void testArrayWithMixedTypes() {
- FlinkJsonType result = JsonFunctions.JSON_ARRAY.eval("stringValue", 123, true);
- assertNotNull(result);
- assertEquals("[\"stringValue\",123,true]", result.getJson().toString());
- }
-
- @Test
- void testArrayWithNullValues() {
- FlinkJsonType result = JsonFunctions.JSON_ARRAY.eval((Object) null);
- assertNotNull(result);
- assertEquals("[null]", result.getJson().toString());
- }
- }
-
- @Nested
- class JsonExtractTest {
-
- @Test
- void testValidPath() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- String result = JsonFunctions.JSON_EXTRACT.eval(json, "$.key");
- assertEquals("value", result);
- }
-
- @Test
- void testValidPathBoolean() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": true}"));
- String result = JsonFunctions.JSON_EXTRACT.eval(json, "$.key");
- assertEquals("true", result);
- }
-
- // Testing eval method with a default value for String
- @Test
- void testStringPathWithDefaultValue() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- String defaultValue = "default";
- String result = JsonFunctions.JSON_EXTRACT.eval(json, "$.nonexistentKey", defaultValue);
- assertEquals(defaultValue, result);
- }
-
- // Testing eval method with a default value for boolean
- @Test
- void testBooleanPathNormalWithDefaultValue() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": true}"));
- boolean defaultValue = false;
- boolean result = JsonFunctions.JSON_EXTRACT.eval(json, "$.key", defaultValue);
- assertTrue(result);
- }
-
- @Test
- void testBooleanPathWithDefaultValue() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": true}"));
- boolean defaultValue = false;
- boolean result = JsonFunctions.JSON_EXTRACT.eval(json, "$.nonexistentKey", defaultValue);
- assertFalse(result);
- }
-
- // Testing eval method with a default value for boolean:false
- @Test
- void testBooleanPathWithDefaultValueTrue() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": true}"));
- boolean defaultValue = true;
- boolean result = JsonFunctions.JSON_EXTRACT.eval(json, "$.nonexistentKey", defaultValue);
- assertTrue(result);
- }
-
- // Testing eval method with a default value for Double
- @Test
- void testDoublePathWithDefaultValue() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": 1.23}"));
- Double defaultValue = 4.56;
- Double result = JsonFunctions.JSON_EXTRACT.eval(json, "$.key", defaultValue);
- assertEquals(1.23, result);
- }
-
- // Testing eval method with a default value for Integer
- @Test
- void testIntegerPathWithDefaultValue() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": 123}"));
- Integer defaultValue = 456;
- Integer result = JsonFunctions.JSON_EXTRACT.eval(json, "$.key", defaultValue);
- assertEquals(123, result);
- }
-
- @Test
- void testInvalidPath() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- String result = JsonFunctions.JSON_EXTRACT.eval(json, "$.nonexistentKey");
- assertNull(result);
- }
- }
-
- @Nested
- class JsonQueryTest {
-
- @Test
- void testValidQuery() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- String result = JsonFunctions.JSON_QUERY.eval(json, "$.key");
- assertEquals("\"value\"", result); // Note the JSON representation of a string value
- }
-
- // Test for a more complex JSON path query
- @Test
- void testComplexQuery() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key1\": {\"key2\": \"value\"}}"));
- String result = JsonFunctions.JSON_QUERY.eval(json, "$.key1.key2");
- assertEquals("\"value\"", result); // JSON representation of the result
- }
-
- // Test for an invalid query
- @Test
- void testInvalidQuery() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- String result = JsonFunctions.JSON_QUERY.eval(json, "$.invalidKey");
- assertNull(result);
- }
- }
-
- @Nested
- class JsonExistsTest {
-
- @Test
- void testPathExists() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- Boolean result = JsonFunctions.JSON_EXISTS.eval(json, "$.key");
- assertTrue(result);
- }
-
- // Test for a path that exists
- @Test
- void testPathExistsComplex() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key1\": {\"key2\": \"value\"}}"));
- Boolean result = JsonFunctions.JSON_EXISTS.eval(json, "$.key1.key2");
- assertTrue(result);
- }
-
- @Test
- void testPathDoesNotExistComplex() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key1\": {\"key2\": \"value\"}}"));
- Boolean result = JsonFunctions.JSON_EXISTS.eval(json, "$.key1.nonexistentKey");
- assertFalse(result);
- }
-
- @Test
- void testPathDoesNotExist() {
- FlinkJsonType json = new FlinkJsonType(readTree("{\"key\": \"value\"}"));
- Boolean result = JsonFunctions.JSON_EXISTS.eval(json, "$.nonexistentKey");
- assertFalse(result);
- }
-
- @Test
- void testNullInput() {
- Boolean result = JsonFunctions.JSON_EXISTS.eval(null, "$.key");
- assertNull(result);
- }
- }
-
- @Nested
- class JsonConcatTest {
-
- @Test
- void testSimpleMerge() {
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key1\": \"value1\"}"));
- FlinkJsonType json2 = new FlinkJsonType(readTree("{\"key2\": \"value2\"}"));
- FlinkJsonType result = JsonFunctions.JSON_CONCAT.eval(json1, json2);
- assertEquals("{\"key1\":\"value1\",\"key2\":\"value2\"}", result.getJson().toString());
- }
-
- @Test
- void testOverlappingKeys() {
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key\": \"value1\"}"));
- FlinkJsonType json2 = new FlinkJsonType(readTree("{\"key\": \"value2\"}"));
- FlinkJsonType result = JsonFunctions.JSON_CONCAT.eval(json1, json2);
- assertEquals("{\"key\":\"value2\"}", result.getJson().toString());
- }
-
- @Test
- void testNullInput() {
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key1\": \"value1\"}"));
- FlinkJsonType result = JsonFunctions.JSON_CONCAT.eval(json1, null);
- assertNull(result);
- }
-
- @Test
- void testNullInput2() {
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key1\": \"value1\"}"));
- FlinkJsonType result = JsonFunctions.JSON_CONCAT.eval(null, json1);
- assertNull(result);
- }
- }
-
- @Nested
- class JsonArrayAggTest {
-
- @Test
- void testAggregateJsonTypes() {
- ArrayAgg accumulator = JsonFunctions.JSON_ARRAYAGG.createAccumulator();
- JsonFunctions.JSON_ARRAYAGG.accumulate(
- accumulator, new FlinkJsonType(readTree("{\"key1\": \"value1\"}")));
- JsonFunctions.JSON_ARRAYAGG.accumulate(
- accumulator, new FlinkJsonType(readTree("{\"key2\": \"value2\"}")));
-
- FlinkJsonType result = JsonFunctions.JSON_ARRAYAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("[{\"key1\":\"value1\"},{\"key2\":\"value2\"}]", result.getJson().toString());
- }
-
- @Test
- void testAggregateMixedTypes() {
- ArrayAgg accumulator = JsonFunctions.JSON_ARRAYAGG.createAccumulator();
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, "stringValue");
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, 123);
-
- FlinkJsonType result = JsonFunctions.JSON_ARRAYAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("[\"stringValue\",123]", result.getJson().toString());
- }
-
- @Test
- void testAccumulateNullValues() {
- ArrayAgg accumulator = JsonFunctions.JSON_ARRAYAGG.createAccumulator();
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, (FlinkJsonType) null);
- FlinkJsonType result = JsonFunctions.JSON_ARRAYAGG.getValue(accumulator);
- assertEquals("[null]", result.getJson().toString());
- }
-
- @Test
- void testArrayWithNullElements() {
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key1\": \"value1\"}"));
- FlinkJsonType json2 = null; // null JSON object
- FlinkJsonType result = JsonFunctions.JSON_ARRAY.eval(json1, json2);
- assertNotNull(result);
- // Depending on implementation, the result might include the null or ignore it
- assertEquals("[{\"key1\":\"value1\"},null]", result.getJson().toString());
- }
-
- @Test
- void testRetractJsonTypes() {
- ArrayAgg accumulator = JsonFunctions.JSON_ARRAYAGG.createAccumulator();
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key\": \"value1\"}"));
- FlinkJsonType json2 = new FlinkJsonType(readTree("{\"key\": \"value2\"}"));
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, json1);
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, json2);
-
- // Now retract one of the JSON objects
- JsonFunctions.JSON_ARRAYAGG.retract(accumulator, json1);
-
- FlinkJsonType result = JsonFunctions.JSON_ARRAYAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("[{\"key\":\"value2\"}]", result.getJson().toString());
- }
-
- @Test
- void testRetractNullJsonType() {
- ArrayAgg accumulator = JsonFunctions.JSON_ARRAYAGG.createAccumulator();
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key\": \"value1\"}"));
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, json1);
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, (FlinkJsonType) null);
-
- // Now retract a null JSON object
- JsonFunctions.JSON_ARRAYAGG.retract(accumulator, (FlinkJsonType) null);
-
- FlinkJsonType result = JsonFunctions.JSON_ARRAYAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("[{\"key\":\"value1\"}]", result.getJson().toString());
- }
-
- @Test
- void testRetractNullFromNonExisting() {
- ArrayAgg accumulator = JsonFunctions.JSON_ARRAYAGG.createAccumulator();
- FlinkJsonType json1 = new FlinkJsonType(readTree("{\"key\": \"value1\"}"));
- JsonFunctions.JSON_ARRAYAGG.accumulate(accumulator, json1);
-
- // Attempt to retract a null value that was never accumulated
- JsonFunctions.JSON_ARRAYAGG.retract(accumulator, (FlinkJsonType) null);
-
- FlinkJsonType result = JsonFunctions.JSON_ARRAYAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("[{\"key\":\"value1\"}]", result.getJson().toString());
- }
- }
-
- @Nested
- class JsonObjectAggTest {
-
- @Test
- void testAggregateJsonTypes() {
- ObjectAgg accumulator = JsonFunctions.JSON_OBJECTAGG.createAccumulator();
- JsonFunctions.JSON_OBJECTAGG.accumulate(
- accumulator, "key1", new FlinkJsonType(readTree("{\"nestedKey1\": \"nestedValue1\"}")));
- JsonFunctions.JSON_OBJECTAGG.accumulate(
- accumulator, "key2", new FlinkJsonType(readTree("{\"nestedKey2\": \"nestedValue2\"}")));
-
- FlinkJsonType result = JsonFunctions.JSON_OBJECTAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals(
- "{\"key1\":{\"nestedKey1\":\"nestedValue1\"},\"key2\":{\"nestedKey2\":\"nestedValue2\"}}",
- result.getJson().toString());
- }
-
- @Test
- void testAggregateWithOverwritingKeys() {
- ObjectAgg accumulator = JsonFunctions.JSON_OBJECTAGG.createAccumulator();
- JsonFunctions.JSON_OBJECTAGG.accumulate(accumulator, "key", "value1");
- JsonFunctions.JSON_OBJECTAGG.accumulate(accumulator, "key", "value2");
-
- FlinkJsonType result = JsonFunctions.JSON_OBJECTAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals(
- "{\"key\":\"value2\"}",
- result.getJson().toString()); // The last value for the same key should be retained
- }
-
- @Test
- void testNullKey() {
- assertThrows(
- IllegalArgumentException.class, () -> JsonFunctions.JSON_OBJECT.eval(null, "value1"));
- }
-
- @Test
- void testNullValue() {
- FlinkJsonType result = JsonFunctions.JSON_OBJECT.eval("key1", null);
- assertNotNull(result);
- assertEquals("{\"key1\":null}", result.getJson().toString());
- }
-
- @Test
- void testNullKeyValue() {
- assertThrows(
- IllegalArgumentException.class, () -> JsonFunctions.JSON_OBJECT.eval(null, null));
- }
-
- @Test
- void testArrayOfNullValues() {
- FlinkJsonType result =
- JsonFunctions.JSON_OBJECT.eval("key1", new Object[] {null, null, null});
- assertNotNull(result);
- // The expected output might vary based on how the function is designed to handle this case
- assertEquals("{\"key1\":[null,null,null]}", result.getJson().toString());
- }
-
- @Test
- void testRetractJsonTypes() {
- ObjectAgg accumulator = JsonFunctions.JSON_OBJECTAGG.createAccumulator();
- JsonFunctions.JSON_OBJECTAGG.accumulate(
- accumulator, "key1", new FlinkJsonType(readTree("{\"nestedKey1\": \"nestedValue1\"}")));
- JsonFunctions.JSON_OBJECTAGG.accumulate(
- accumulator, "key2", new FlinkJsonType(readTree("{\"nestedKey2\": \"nestedValue2\"}")));
-
- // Now retract a key-value pair
- JsonFunctions.JSON_OBJECTAGG.retract(
- accumulator, "key1", new FlinkJsonType(readTree("{\"nestedKey1\": \"nestedValue1\"}")));
-
- FlinkJsonType result = JsonFunctions.JSON_OBJECTAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("{\"key2\":{\"nestedKey2\":\"nestedValue2\"}}", result.getJson().toString());
- }
-
- @Test
- void testRetractNullJsonValue() {
- ObjectAgg accumulator = JsonFunctions.JSON_OBJECTAGG.createAccumulator();
- JsonFunctions.JSON_OBJECTAGG.accumulate(
- accumulator, "key1", new FlinkJsonType(readTree("{\"nestedKey1\": \"nestedValue1\"}")));
- JsonFunctions.JSON_OBJECTAGG.accumulate(accumulator, "key2", (FlinkJsonType) null);
-
- // Now retract a null value
- JsonFunctions.JSON_OBJECTAGG.retract(accumulator, "key2", (FlinkJsonType) null);
-
- FlinkJsonType result = JsonFunctions.JSON_OBJECTAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("{\"key1\":{\"nestedKey1\":\"nestedValue1\"}}", result.getJson().toString());
- }
-
- @Test
- void testRetractNullKey() {
- ObjectAgg accumulator = JsonFunctions.JSON_OBJECTAGG.createAccumulator();
- JsonFunctions.JSON_OBJECTAGG.accumulate(
- accumulator, "key1", new FlinkJsonType(readTree("{\"nestedKey1\": \"nestedValue1\"}")));
- JsonFunctions.JSON_OBJECTAGG.accumulate(accumulator, null, "someValue");
-
- // Attempt to retract a key-value pair where the key is null
- JsonFunctions.JSON_OBJECTAGG.retract(accumulator, null, "someValue");
-
- FlinkJsonType result = JsonFunctions.JSON_OBJECTAGG.getValue(accumulator);
- assertNotNull(result);
- assertEquals("{\"key1\":{\"nestedKey1\":\"nestedValue1\"}}", result.getJson().toString());
- }
- }
-}
diff --git a/sqrl-lib-common/pom.xml b/sqrl-lib-common/pom.xml
deleted file mode 100644
index 8aff64e..0000000
--- a/sqrl-lib-common/pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.datasqrl.flink</groupId>
-    <artifactId>sqrl-flink-parent</artifactId>
-    <version>0.1-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>sqrl-lib-common</artifactId>
-
-  <description>Common classes for sqrl flink libs</description>
-
-  <url>https://www.datasqrl.com/</url>
-
-  <licenses>
-    <license>
-      <name>The Apache Software License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-
-  <developers>
-    <developer>
-      <name>Daniel Henneberger</name>
-      <email>daniel@datasqrl.com</email>
-      <url>https://github.com/henneberger</url>
-    </developer>
-  </developers>
-
-  <scm>
-    <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
-    <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
-    <tag>HEAD</tag>
-    <url>https://github.com/DataSQRL/sqrl</url>
-  </scm>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-common</artifactId>
-      <version>1.19.0</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.auto.service</groupId>
-      <artifactId>auto-service</artifactId>
-      <version>1.1.1</version>
-    </dependency>
-  </dependencies>
-
-</project>
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/datatype/HashColumns.java b/sqrl-lib-common/src/main/java/com/datasqrl/datatype/HashColumns.java
deleted file mode 100644
index ac4602b..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/datatype/HashColumns.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.datatype;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.nio.charset.StandardCharsets;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.Objects;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.InputTypeStrategies;
-import org.apache.flink.table.types.inference.InputTypeStrategy;
-import org.apache.flink.table.types.inference.TypeInference;
-import org.apache.flink.table.types.inference.TypeStrategies;
-
-@AutoService(StandardLibraryFunction.class)
-public class HashColumns extends ScalarFunction implements StandardLibraryFunction {
-
- public String eval(Object... objects) {
- if (objects.length == 0) return "";
- try {
- MessageDigest digest = MessageDigest.getInstance("MD5"); // Changed to MD5
- for (Object obj : objects) {
- int hash = Objects.hashCode(obj); // to handle null objects
- digest.update(Integer.toString(hash).getBytes(StandardCharsets.UTF_8));
- }
-
- byte[] hashBytes = digest.digest();
- StringBuilder hexString = new StringBuilder(2 * hashBytes.length);
- for (byte b : hashBytes) {
- String hex = Integer.toHexString(0xff & b);
- if (hex.length() == 1) {
- hexString.append('0');
- }
- hexString.append(hex);
- }
- return hexString.toString();
- } catch (NoSuchAlgorithmException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- InputTypeStrategy inputTypeStrategy =
- InputTypeStrategies.compositeSequence().finishWithVarying(InputTypeStrategies.WILDCARD);
-
- return TypeInference.newBuilder()
- .inputTypeStrategy(inputTypeStrategy)
- .outputTypeStrategy(TypeStrategies.explicit(DataTypes.CHAR(32)))
- .build();
- }
-}
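
Note: a usage sketch for the deleted `HashColumns` function above. It hashes the `Objects.hashCode` of each argument into an MD5 hex digest, so it is null-safe and deterministic for equal inputs; the caveat is that it digests hash codes rather than value bytes, so distinct values with colliding hash codes produce the same digest. The wrapper class `HashColumnsExample` is hypothetical and not part of the diff.

```java
package com.datasqrl.datatype; // assumed: same package as the deleted class

public class HashColumnsExample {
  public static void main(String[] args) {
    HashColumns hash = new HashColumns();
    String key1 = hash.eval("a", 1, null); // 32-character hex MD5 digest; null-safe
    String key2 = hash.eval("a", 1, null);
    System.out.println(key1.equals(key2));     // true: same inputs -> same digest
    System.out.println(hash.eval().isEmpty()); // true: zero arguments -> empty string
  }
}
```
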
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/datatype/Noop.java b/sqrl-lib-common/src/main/java/com/datasqrl/datatype/Noop.java
deleted file mode 100644
index 3836c42..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/datatype/Noop.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.datatype;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.InputTypeStrategies;
-import org.apache.flink.table.types.inference.InputTypeStrategy;
-import org.apache.flink.table.types.inference.TypeInference;
-import org.apache.flink.table.types.inference.TypeStrategies;
-
-@AutoService(StandardLibraryFunction.class)
-public class Noop extends ScalarFunction implements StandardLibraryFunction {
-
- public boolean eval(Object... objects) {
- return true;
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- InputTypeStrategy inputTypeStrategy =
- InputTypeStrategies.compositeSequence().finishWithVarying(InputTypeStrategies.WILDCARD);
-
- return TypeInference.newBuilder()
- .inputTypeStrategy(inputTypeStrategy)
- .outputTypeStrategy(TypeStrategies.explicit(DataTypes.BOOLEAN()))
- .build();
- }
-}
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/datatype/SerializeToBytes.java b/sqrl-lib-common/src/main/java/com/datasqrl/datatype/SerializeToBytes.java
deleted file mode 100644
index e1c5a49..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/datatype/SerializeToBytes.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.datatype;
-
-import com.datasqrl.function.SqrlCastFunction;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import lombok.SneakyThrows;
-import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.core.memory.DataOutputSerializer;
-import org.apache.flink.table.annotation.DataTypeHint;
-import org.apache.flink.table.annotation.InputGroup;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/** Converts an annotated data type to its serialized byte representation. */
-@AutoService(StandardLibraryFunction.class)
-public class SerializeToBytes extends ScalarFunction
- implements StandardLibraryFunction, SqrlCastFunction {
-
- @SneakyThrows
- public byte[] eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object object) {
- DataTypeHint hint = object.getClass().getAnnotation(DataTypeHint.class);
- Class<? extends TypeSerializer> serializerClass = hint.rawSerializer();
-
- TypeSerializer serializer = serializerClass.newInstance();
-
- DataOutputSerializer dos = new DataOutputSerializer(128);
-
- serializer.serialize(object, dos);
-
- return dos.getCopyOfBuffer();
- }
-}
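
Note: the deleted `SerializeToBytes` above looks up the `rawSerializer` declared on the argument's `@DataTypeHint` annotation, so it only works for RAW-hinted types such as `FlinkJsonType`. A minimal sketch (illustrative only, not part of the diff; the wrapper class is hypothetical):

```java
package com.datasqrl.datatype; // assumed: same package as the deleted class

import com.datasqrl.json.FlinkJsonType;
import com.datasqrl.json.ToJson;

public class SerializeToBytesExample {
  public static void main(String[] args) {
    // FlinkJsonType carries @DataTypeHint(rawSerializer = FlinkJsonTypeSerializer.class),
    // which eval() reads reflectively; an unannotated argument would fail here.
    FlinkJsonType json = new ToJson().eval("{\"a\":1}");
    byte[] bytes = new SerializeToBytes().eval(json); // length-prefixed JSON bytes
    System.out.println(bytes.length);
  }
}
```
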
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/function/CommonFunctions.java b/sqrl-lib-common/src/main/java/com/datasqrl/function/CommonFunctions.java
deleted file mode 100644
index 7e79455..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/function/CommonFunctions.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.function;
-
-import com.datasqrl.datatype.HashColumns;
-import com.datasqrl.datatype.Noop;
-import com.datasqrl.datatype.SerializeToBytes;
-
-public class CommonFunctions {
-
- public static final SerializeToBytes SERIALIZE_TO_BYTES = new SerializeToBytes();
- public static final Noop NOOP = new Noop();
- public static final HashColumns HASH_COLUMNS = new HashColumns();
-}
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/function/FlinkTypeUtil.java b/sqrl-lib-common/src/main/java/com/datasqrl/function/FlinkTypeUtil.java
deleted file mode 100644
index 3cbb130..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/function/FlinkTypeUtil.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.function;
-
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import lombok.Builder;
-import lombok.Singular;
-import lombok.SneakyThrows;
-import lombok.Value;
-import org.apache.flink.table.functions.FunctionDefinition;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.inference.ArgumentCount;
-import org.apache.flink.table.types.inference.CallContext;
-import org.apache.flink.table.types.inference.InputTypeStrategy;
-import org.apache.flink.table.types.inference.Signature;
-import org.apache.flink.table.types.inference.TypeInference;
-import org.apache.flink.table.types.inference.TypeStrategy;
-import org.apache.flink.table.types.inference.utils.AdaptedCallContext;
-
-public class FlinkTypeUtil {
-
- public static TypeStrategy nullPreservingOutputStrategy(DataType outputType) {
- return callContext -> {
- DataType type = getFirstArgumentType(callContext);
-
- if (type.getLogicalType().isNullable()) {
- return Optional.of(outputType.nullable());
- }
-
- return Optional.of(outputType.notNull());
- };
- }
-
- public static TypeInference basicNullInference(DataType outputType, DataType inputType) {
- return TypeInference.newBuilder()
- .typedArguments(inputType)
- .outputTypeStrategy(nullPreservingOutputStrategy(outputType))
- .build();
- }
-
- public static TypeInference.Builder basicNullInferenceBuilder(
- DataType outputType, DataType inputType) {
- return TypeInference.newBuilder()
- .typedArguments(inputType)
- .outputTypeStrategy(nullPreservingOutputStrategy(outputType));
- }
-
- @SneakyThrows
- public static DataType getFirstArgumentType(CallContext callContext) {
- if (callContext instanceof AdaptedCallContext) {
- Field privateField = AdaptedCallContext.class.getDeclaredField("originalContext");
- privateField.setAccessible(true);
- CallContext originalContext = (CallContext) privateField.get(callContext);
-
- return originalContext.getArgumentDataTypes().get(0);
- } else {
- return callContext.getArgumentDataTypes().get(0);
- }
- }
-
- @Value
- @Builder
- public static class VariableArguments implements InputTypeStrategy {
-
- @Singular List<DataType> staticTypes;
- DataType variableType;
- int minVariableArguments;
- int maxVariableArguments;
-
- @Override
- public ArgumentCount getArgumentCount() {
- return new ArgumentCount() {
- @Override
- public boolean isValidCount(int count) {
- int variableCount = count - staticTypes.size();
- return variableCount >= minVariableArguments && variableCount <= maxVariableArguments;
- }
-
- @Override
- public Optional<Integer> getMinCount() {
- return Optional.of(staticTypes.size() + minVariableArguments);
- }
-
- @Override
- public Optional<Integer> getMaxCount() {
- return Optional.of(staticTypes.size() + maxVariableArguments);
- }
- };
- }
-
- @Override
- public Optional<List<DataType>> inferInputTypes(
- CallContext callContext, boolean throwOnFailure) {
- int argCount = callContext.getArgumentDataTypes().size();
- int varArgs = argCount - staticTypes.size();
- if (varArgs < 0 || varArgs < minVariableArguments || varArgs > maxVariableArguments)
- return Optional.empty();
- ArrayList<DataType> result = new ArrayList<>(argCount);
- result.addAll(staticTypes);
- for (int i = 0; i < varArgs; i++) {
- result.add(variableType);
- }
- return Optional.of(result);
- }
-
- @Override
- public List<Signature> getExpectedSignatures(FunctionDefinition definition) {
- List<Signature.Argument> arguments = new ArrayList<>(staticTypes.size() + 1);
- staticTypes.stream()
- .map(DataType::toString)
- .map(Signature.Argument::of)
- .forEach(arguments::add);
- arguments.add(Signature.Argument.of(variableType.toString() + "..."));
- return List.of(Signature.of(arguments));
- }
- }
-}
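
Note: the deleted `FlinkTypeUtil.VariableArguments` above models a signature with a fixed prefix of typed arguments followed by a bounded varargs tail. A sketch of how a function's type inference could be declared with it (illustrative only, not part of the diff; the wrapper class and method names are hypothetical):

```java
package com.datasqrl.function; // assumed: same package as the deleted utility

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.inference.InputTypeStrategy;
import org.apache.flink.table.types.inference.TypeInference;
import org.apache.flink.table.types.inference.TypeStrategies;

public class VarArgsInferenceExample {

  // One mandatory STRING argument, then between 0 and 10 further STRING arguments.
  static TypeInference stringVarArgs() {
    InputTypeStrategy args =
        FlinkTypeUtil.VariableArguments.builder()
            .staticType(DataTypes.STRING()) // @Singular adds one fixed leading type
            .variableType(DataTypes.STRING())
            .minVariableArguments(0)
            .maxVariableArguments(10)
            .build();
    return TypeInference.newBuilder()
        .inputTypeStrategy(args)
        .outputTypeStrategy(TypeStrategies.explicit(DataTypes.STRING()))
        .build();
  }
}
```
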
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/function/SqrlCastFunction.java b/sqrl-lib-common/src/main/java/com/datasqrl/function/SqrlCastFunction.java
deleted file mode 100644
index 976a77d..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/function/SqrlCastFunction.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.function;
-
-import org.apache.flink.table.functions.FunctionDefinition;
-
-/**
- * Marker interface for functions that are used by DataSQRL to down- and up-cast types when moving
- * data between engines
- */
-public interface SqrlCastFunction extends FunctionDefinition {}
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonType.java b/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonType.java
deleted file mode 100644
index 9cf26cc..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonType.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
-import org.apache.flink.table.annotation.DataTypeHint;
-
-@DataTypeHint(
- value = "RAW",
- bridgedTo = FlinkJsonType.class,
- rawSerializer = FlinkJsonTypeSerializer.class)
-public class FlinkJsonType {
- public JsonNode json;
-
- public FlinkJsonType(JsonNode json) {
- this.json = json;
- }
-
- public JsonNode getJson() {
- return json;
- }
-}
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonTypeSerializer.java b/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonTypeSerializer.java
deleted file mode 100644
index 8d8b562..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonTypeSerializer.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import java.io.IOException;
-import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
-import org.apache.flink.core.memory.DataInputView;
-import org.apache.flink.core.memory.DataOutputView;
-import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
-
-public class FlinkJsonTypeSerializer extends TypeSerializer<FlinkJsonType> {
-
- ObjectMapper mapper = new ObjectMapper();
-
- @Override
- public boolean isImmutableType() {
- return true;
- }
-
- @Override
- public FlinkJsonType createInstance() {
- return new FlinkJsonType(null);
- }
-
- @Override
- public FlinkJsonType copy(FlinkJsonType from) {
- return new FlinkJsonType(from.getJson());
- }
-
- @Override
- public FlinkJsonType copy(FlinkJsonType from, FlinkJsonType reuse) {
- return copy(from);
- }
-
- @Override
- public int getLength() {
- return -1; // indicates that this serializer does not have a fixed length
- }
-
- @Override
- public void serialize(FlinkJsonType record, DataOutputView target) throws IOException {
- byte[] jsonData = mapper.writeValueAsBytes(record.getJson());
- target.writeInt(jsonData.length);
- target.write(jsonData);
- }
-
- @Override
- public FlinkJsonType deserialize(DataInputView source) throws IOException {
- int length = source.readInt();
- byte[] jsonData = new byte[length];
- source.readFully(jsonData);
- return new FlinkJsonType(mapper.readTree(jsonData));
- }
-
- @Override
- public FlinkJsonType deserialize(FlinkJsonType reuse, DataInputView source) throws IOException {
- return deserialize(source);
- }
-
- @Override
- public void copy(DataInputView source, DataOutputView target) throws IOException {
- int length = source.readInt();
- byte[] jsonData = new byte[length];
- source.readFully(jsonData);
- target.writeInt(length);
- target.write(jsonData);
- }
-
- @Override
- public TypeSerializer<FlinkJsonType> duplicate() {
- return this;
- }
-
- @Override
- public boolean equals(Object obj) {
- return obj instanceof FlinkJsonTypeSerializer;
- }
-
- @Override
- public int hashCode() {
- return FlinkJsonTypeSerializer.class.hashCode();
- }
-
- @Override
- public TypeSerializerSnapshot<FlinkJsonType> snapshotConfiguration() {
- return new FlinkJsonTypeSerializerSnapshot();
- }
-}
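
Note: a round-trip sketch for the deleted serializer above, which writes an int length prefix followed by the JSON bytes. Illustrative only, not part of the diff; the wrapper class is hypothetical, and `DataOutputSerializer`/`DataInputDeserializer` are Flink's in-memory data views.

```java
package com.datasqrl.json; // assumed: same package as the deleted classes

import java.io.IOException;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class JsonSerializerRoundTrip {
  public static void main(String[] args) throws IOException {
    FlinkJsonTypeSerializer serializer = new FlinkJsonTypeSerializer();

    // serialize() writes an int length prefix followed by the JSON bytes.
    DataOutputSerializer out = new DataOutputSerializer(64);
    serializer.serialize(new ToJson().eval("{\"a\":1}"), out);

    // deserialize() reads the prefix back and re-parses the tree.
    DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
    FlinkJsonType back = serializer.deserialize(in);
    System.out.println(back.getJson()); // {"a":1}
  }
}
```
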
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonTypeSerializerSnapshot.java b/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonTypeSerializerSnapshot.java
deleted file mode 100644
index 74e0ec1..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/json/FlinkJsonTypeSerializerSnapshot.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.json;
-
-import java.io.IOException;
-import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility;
-import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
-import org.apache.flink.core.memory.DataInputView;
-import org.apache.flink.core.memory.DataOutputView;
-
-public class FlinkJsonTypeSerializerSnapshot implements TypeSerializerSnapshot<FlinkJsonType> {
-
- private Class<? extends TypeSerializer<FlinkJsonType>> serializerClass;
-
- public FlinkJsonTypeSerializerSnapshot() {
- this.serializerClass = FlinkJsonTypeSerializer.class;
- }
-
- @Override
- public int getCurrentVersion() {
- return 1;
- }
-
- @Override
- public void writeSnapshot(DataOutputView out) throws IOException {
- out.writeUTF(FlinkJsonTypeSerializer.class.getName());
- }
-
- @Override
- public void readSnapshot(int readVersion, DataInputView in, ClassLoader userCodeClassLoader)
- throws IOException {
- String className = in.readUTF();
- try {
- this.serializerClass =
- (Class<? extends TypeSerializer<FlinkJsonType>>) Class.forName(className, true, userCodeClassLoader);
- } catch (ClassNotFoundException e) {
- throw new IOException("Failed to find serializer class: " + className, e);
- }
- }
-
- @Override
- public TypeSerializer<FlinkJsonType> restoreSerializer() {
- try {
- return serializerClass.newInstance();
- } catch (InstantiationException | IllegalAccessException e) {
- throw new RuntimeException(
- "Failed to instantiate serializer class: " + serializerClass.getName(), e);
- }
- }
-
- @Override
- public TypeSerializerSchemaCompatibility<FlinkJsonType> resolveSchemaCompatibility(
- TypeSerializer<FlinkJsonType> newSerializer) {
- if (newSerializer.getClass() == this.serializerClass) {
- return TypeSerializerSchemaCompatibility.compatibleAsIs();
- } else {
- return TypeSerializerSchemaCompatibility.incompatible();
- }
- }
-}
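
Note: the snapshot above restores the serializer by reflectively instantiating the class name it wrote. A sketch of the write/read cycle (illustrative only, not part of the diff; the wrapper class is hypothetical, and the context class loader stands in for Flink's user-code class loader):

```java
package com.datasqrl.json; // assumed: same package as the deleted classes

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class SnapshotRoundTrip {
  public static void main(String[] args) throws Exception {
    FlinkJsonTypeSerializerSnapshot snapshot = new FlinkJsonTypeSerializerSnapshot();

    // Persist: the snapshot stores only the serializer class name.
    DataOutputSerializer out = new DataOutputSerializer(64);
    snapshot.writeSnapshot(out);

    // Restore: read the class name back and instantiate the serializer reflectively.
    FlinkJsonTypeSerializerSnapshot restored = new FlinkJsonTypeSerializerSnapshot();
    restored.readSnapshot(1, new DataInputDeserializer(out.getCopyOfBuffer()),
        Thread.currentThread().getContextClassLoader());
    System.out.println(restored.restoreSerializer()); // FlinkJsonTypeSerializer instance
  }
}
```
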
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorType.java b/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorType.java
deleted file mode 100644
index 80ac4f8..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorType.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import org.apache.flink.table.annotation.DataTypeHint;
-
-@DataTypeHint(
- value = "RAW",
- bridgedTo = FlinkVectorType.class,
- rawSerializer = FlinkVectorTypeSerializer.class)
-public class FlinkVectorType {
- public double[] value;
-
- public FlinkVectorType(double[] value) {
- this.value = value;
- }
-
- public double[] getValue() {
- return value;
- }
-}
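Because the RAW data-type hint is declared on the class itself, a scalar function can return `FlinkVectorType` without per-function type inference. A hedged sketch; `DoubleToVector` is a made-up example, not part of this module:

```java
import org.apache.flink.table.functions.ScalarFunction;

// Hypothetical UDF for illustration only: the class-level @DataTypeHint on
// FlinkVectorType should let Flink's reflective extraction treat the return
// value as RAW, serialized via FlinkVectorTypeSerializer.
public class DoubleToVector extends ScalarFunction {
  public FlinkVectorType eval(Double x) {
    return x == null ? null : new FlinkVectorType(new double[] {x});
  }
}
```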
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorTypeSerializer.java b/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorTypeSerializer.java
deleted file mode 100644
index c44c9a8..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorTypeSerializer.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import java.io.IOException;
-import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
-import org.apache.flink.core.memory.DataInputView;
-import org.apache.flink.core.memory.DataOutputView;
-
-public class FlinkVectorTypeSerializer extends TypeSerializer<FlinkVectorType> {
-
- @Override
- public boolean isImmutableType() {
- return true;
- }
-
- @Override
- public FlinkVectorType createInstance() {
- return new FlinkVectorType(null);
- }
-
- @Override
- public FlinkVectorType copy(FlinkVectorType from) {
- return new FlinkVectorType(from.getValue());
- }
-
- @Override
- public FlinkVectorType copy(FlinkVectorType from, FlinkVectorType reuse) {
- return copy(from);
- }
-
- @Override
- public int getLength() {
- return -1; // indicates that this serializer does not have a fixed length
- }
-
- @Override
- public void serialize(FlinkVectorType record, DataOutputView target) throws IOException {
- target.writeInt(record.getValue().length); // First write the length of the array
- for (double v : record.getValue()) {
- target.writeDouble(v); // Write each double value
- }
- }
-
- @Override
- public FlinkVectorType deserialize(DataInputView source) throws IOException {
- int length = source.readInt();
- double[] array = new double[length];
- for (int i = 0; i < length; i++) {
- array[i] = source.readDouble();
- }
- return new FlinkVectorType(array);
- }
-
- @Override
- public FlinkVectorType deserialize(FlinkVectorType reuse, DataInputView source)
- throws IOException {
- return deserialize(source);
- }
-
- @Override
- public void copy(DataInputView source, DataOutputView target) throws IOException {
- // Mirror the wire format of serialize(): a length prefix followed by doubles.
- int length = source.readInt();
- target.writeInt(length);
- for (int i = 0; i < length; i++) {
- target.writeDouble(source.readDouble());
- }
- }
-
- @Override
- public TypeSerializer<FlinkVectorType> duplicate() {
- return this;
- }
-
- @Override
- public boolean equals(Object obj) {
- return obj instanceof FlinkVectorTypeSerializer;
- }
-
- @Override
- public int hashCode() {
- return FlinkVectorTypeSerializer.class.hashCode();
- }
-
- @Override
- public TypeSerializerSnapshot<FlinkVectorType> snapshotConfiguration() {
- return new FlinkVectorTypeSerializerSnapshot();
- }
-}
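The wire format is a length-prefixed array of doubles. A minimal round-trip sketch using Flink's in-memory data views (the wrapper class is illustrative):

```java
import java.util.Arrays;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class VectorSerializerRoundTrip {
  public static void main(String[] args) throws Exception {
    FlinkVectorTypeSerializer serializer = new FlinkVectorTypeSerializer();

    // serialize() writes the array length, then each double value.
    DataOutputSerializer out = new DataOutputSerializer(64);
    serializer.serialize(new FlinkVectorType(new double[] {1.0, 2.0, 3.0}), out);

    // deserialize() reads the length back and rebuilds the array.
    FlinkVectorType copy =
        serializer.deserialize(new DataInputDeserializer(out.getCopyOfBuffer()));
    System.out.println(Arrays.toString(copy.getValue())); // [1.0, 2.0, 3.0]
  }
}
```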
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorTypeSerializerSnapshot.java b/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorTypeSerializerSnapshot.java
deleted file mode 100644
index 01224fa..0000000
--- a/sqrl-lib-common/src/main/java/com/datasqrl/vector/FlinkVectorTypeSerializerSnapshot.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import java.io.IOException;
-import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility;
-import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
-import org.apache.flink.core.memory.DataInputView;
-import org.apache.flink.core.memory.DataOutputView;
-
-public class FlinkVectorTypeSerializerSnapshot implements TypeSerializerSnapshot<FlinkVectorType> {
-
- private Class<? extends TypeSerializer<FlinkVectorType>> serializerClass;
-
- public FlinkVectorTypeSerializerSnapshot() {
- this.serializerClass = FlinkVectorTypeSerializer.class;
- }
-
- @Override
- public int getCurrentVersion() {
- return 1;
- }
-
- @Override
- public void writeSnapshot(DataOutputView out) throws IOException {
- out.writeUTF(FlinkVectorTypeSerializer.class.getName());
- }
-
- @Override
- public void readSnapshot(int readVersion, DataInputView in, ClassLoader userCodeClassLoader)
- throws IOException {
- String className = in.readUTF();
- try {
- this.serializerClass =
- (Class<? extends TypeSerializer<FlinkVectorType>>) Class.forName(className, true, userCodeClassLoader);
- } catch (ClassNotFoundException e) {
- throw new IOException("Failed to find serializer class: " + className, e);
- }
- }
-
- @Override
- public TypeSerializer<FlinkVectorType> restoreSerializer() {
- try {
- return serializerClass.newInstance();
- } catch (InstantiationException | IllegalAccessException e) {
- throw new RuntimeException(
- "Failed to instantiate serializer class: " + serializerClass.getName(), e);
- }
- }
-
- @Override
- public TypeSerializerSchemaCompatibility<FlinkVectorType> resolveSchemaCompatibility(
- TypeSerializer<FlinkVectorType> newSerializer) {
- if (newSerializer.getClass() == this.serializerClass) {
- return TypeSerializerSchemaCompatibility.compatibleAsIs();
- } else {
- return TypeSerializerSchemaCompatibility.incompatible();
- }
- }
-}
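Compatibility here is decided purely by serializer class identity: only the exact same serializer class restores state as-is, anything else is incompatible. A small sketch of that check:

```java
import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility;

public class VectorSnapshotCompatibilityDemo {
  public static void main(String[] args) {
    FlinkVectorTypeSerializerSnapshot snapshot = new FlinkVectorTypeSerializerSnapshot();

    // Same class as the snapshot was created for -> compatible as-is.
    TypeSerializerSchemaCompatibility<FlinkVectorType> result =
        snapshot.resolveSchemaCompatibility(new FlinkVectorTypeSerializer());
    System.out.println(result.isCompatibleAsIs()); // true; any other class -> incompatible
  }
}
```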
diff --git a/sqrl-secure/pom.xml b/sqrl-secure/pom.xml
deleted file mode 100644
index 121e32a..0000000
--- a/sqrl-secure/pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>com.datasqrl.flink</groupId>
- <artifactId>sqrl-flink-parent</artifactId>
- <version>0.1-SNAPSHOT</version>
- </parent>
-
- <artifactId>sqrl-secure</artifactId>
-
- <description>Secure functions for flink</description>
-
- <url>https://www.datasqrl.com/</url>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <developers>
- <developer>
- <name>Daniel Henneberger</name>
- <email>daniel@datasqrl.com</email>
- <url>https://github.com/henneberger</url>
- </developer>
- </developers>
-
- <scm>
- <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
- <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
- <tag>HEAD</tag>
- <url>https://github.com/DataSQRL/sqrl</url>
- </scm>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-api-java-bridge</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-lib-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- </dependencies>
-</project>
diff --git a/sqrl-secure/src/main/java/com/datasqrl/secure/README.md b/sqrl-secure/src/main/java/com/datasqrl/secure/README.md
deleted file mode 100644
index 377d979..0000000
--- a/sqrl-secure/src/main/java/com/datasqrl/secure/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-| Function Documentation |
-|-------------------------|
-| `RandomID(bigint) → string` Generate a random ID of the specified number of bytes. Example: `RandomID(16) → '3wJq7dJkQh5HztHWXcQeXQ'` |
-| `Uuid() → char(36)` Generate a random UUID. Example: `Uuid() → '550e8400-e29b-41d4-a716-446655440000'` |
\ No newline at end of file
diff --git a/sqrl-secure/src/main/java/com/datasqrl/secure/RandomID.java b/sqrl-secure/src/main/java/com/datasqrl/secure/RandomID.java
deleted file mode 100644
index 8887328..0000000
--- a/sqrl-secure/src/main/java/com/datasqrl/secure/RandomID.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.secure;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.security.SecureRandom;
-import java.util.Base64;
-import java.util.List;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/**
- * Generates a random ID string with the given number of secure random bytes. The bytes are
- * base64-encoded, so the resulting string is longer than the number of bytes.
- */
-@AutoService(StandardLibraryFunction.class)
-public class RandomID extends ScalarFunction implements StandardLibraryFunction {
-
- private static final SecureRandom random = new SecureRandom();
- private static final Base64.Encoder encoder = Base64.getUrlEncoder().withoutPadding();
-
- public String eval(Long numBytes) {
- if (numBytes == null) {
- return null;
- }
- assert numBytes >= 0;
- byte[] buffer = new byte[numBytes.intValue()];
- random.nextBytes(buffer);
- return encoder.encodeToString(buffer);
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return FlinkTypeUtil.basicNullInferenceBuilder(DataTypes.STRING(), DataTypes.BIGINT())
- .typedArguments(List.of(DataTypes.BIGINT()))
- .build();
- }
-}
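A quick worked example of the documented length behavior: n random bytes base64-url encode (without padding) to ceil(4n/3) characters, so 16 bytes yield a 22-character ID. The wrapper class is illustrative:

```java
public class RandomIdDemo {
  public static void main(String[] args) {
    RandomID randomId = new RandomID();

    String id = randomId.eval(16L);
    System.out.println(id);          // e.g. "3wJq7dJkQh5HztHWXcQeXQ"
    System.out.println(id.length()); // 22 = ceil(16 * 4 / 3), URL-safe, no padding

    System.out.println(randomId.eval(null)); // null input is preserved
  }
}
```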
diff --git a/sqrl-secure/src/main/java/com/datasqrl/secure/SecureFunctions.java b/sqrl-secure/src/main/java/com/datasqrl/secure/SecureFunctions.java
deleted file mode 100644
index 0568ddd..0000000
--- a/sqrl-secure/src/main/java/com/datasqrl/secure/SecureFunctions.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.secure;
-
-public class SecureFunctions {
-
- public static RandomID RANDOM_ID = new RandomID();
- public static Uuid UUID = new Uuid();
-}
diff --git a/sqrl-secure/src/main/java/com/datasqrl/secure/Uuid.java b/sqrl-secure/src/main/java/com/datasqrl/secure/Uuid.java
deleted file mode 100644
index 562b319..0000000
--- a/sqrl-secure/src/main/java/com/datasqrl/secure/Uuid.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.secure;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.util.Optional;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/** Generates a random UUID string */
-@AutoService(StandardLibraryFunction.class)
-public class Uuid extends ScalarFunction implements StandardLibraryFunction {
-
- public String eval() {
- return java.util.UUID.randomUUID().toString();
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder()
- .typedArguments()
- .outputTypeStrategy(callContext -> Optional.of(DataTypes.CHAR(36).notNull()))
- .build();
- }
-
- @Override
- public boolean isDeterministic() {
- return false;
- }
-}
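The declared output type `CHAR(36) NOT NULL` matches the canonical UUID text form, and `isDeterministic() == false` keeps the planner from constant-folding the call to a single value. Usage sketch:

```java
public class UuidDemo {
  public static void main(String[] args) {
    Uuid uuid = new Uuid();
    String u = uuid.eval();         // e.g. "550e8400-e29b-41d4-a716-446655440000"
    System.out.println(u.length()); // 36, matching the declared CHAR(36) NOT NULL
  }
}
```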
diff --git a/sqrl-text/README.md b/sqrl-text/README.md
deleted file mode 100644
index 2e62101..0000000
--- a/sqrl-text/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-| Function Documentation |
-|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `banned_words_filter( text STRING ) → BOOLEAN` Filter out text based on a predefined list of banned words. `banned_words_filter('This is some text') → FALSE` |
-| `format( text STRING, [ arguments STRING ... ] ) → STRING` Format a string using the specified arguments. Optional parameters include `arguments` to be inserted into the format string. `format('Hello %s', 'World') → 'Hello World'` |
-| `text_search( query STRING, texts STRING ... ) → DOUBLE` Computes a relevance score for a query against given texts. Optional parameters include multiple `texts` against which the query is compared. `text_search('find this', 'this is a string', 'this is another string') → 0.5` |
\ No newline at end of file
diff --git a/sqrl-text/pom.xml b/sqrl-text/pom.xml
deleted file mode 100644
index 7919dcf..0000000
--- a/sqrl-text/pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>com.datasqrl.flink</groupId>
- <artifactId>sqrl-flink-parent</artifactId>
- <version>0.1-SNAPSHOT</version>
- </parent>
-
- <artifactId>sqrl-text</artifactId>
-
- <description>Text functions for flink</description>
-
- <url>https://www.datasqrl.com/</url>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <developers>
- <developer>
- <name>Daniel Henneberger</name>
- <email>daniel@datasqrl.com</email>
- <url>https://github.com/henneberger</url>
- </developer>
- </developers>
-
- <scm>
- <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
- <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
- <tag>HEAD</tag>
- <url>https://github.com/DataSQRL/sqrl</url>
- </scm>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-api-java-bridge</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-lib-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- </dependencies>
-</project>
diff --git a/sqrl-text/src/main/java/com/datasqrl/text/Format.java b/sqrl-text/src/main/java/com/datasqrl/text/Format.java
deleted file mode 100644
index 50b9a0d..0000000
--- a/sqrl-text/src/main/java/com/datasqrl/text/Format.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.text;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.FlinkTypeUtil.VariableArguments;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/** Replaces the placeholders in the first argument with the remaining arguments in order. */
-@AutoService(StandardLibraryFunction.class)
-public class Format extends ScalarFunction implements StandardLibraryFunction {
-
- public String eval(String text, String... arguments) {
- if (text == null) {
- return null;
- }
- return String.format(text, (Object[]) arguments);
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder()
- .inputTypeStrategy(
- VariableArguments.builder()
- .staticType(DataTypes.STRING())
- .variableType(DataTypes.STRING())
- .minVariableArguments(0)
- .maxVariableArguments(Integer.MAX_VALUE)
- .build())
- .outputTypeStrategy(FlinkTypeUtil.nullPreservingOutputStrategy(DataTypes.STRING()))
- .build();
- }
-}
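Since the variable arguments are all strings, the function behaves like `String.format` restricted to string substitution, returning null only when the format string itself is null. Usage sketch (the wrapper class is illustrative):

```java
public class FormatDemo {
  public static void main(String[] args) {
    Format format = new Format();
    System.out.println(format.eval("Hello %s", "World"));       // Hello World
    System.out.println(format.eval("%s-%s-%s", "a", "b", "c")); // a-b-c
    System.out.println(format.eval("no placeholders"));         // no placeholders
    System.out.println(format.eval(null, "ignored"));           // null
  }
}
```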
diff --git a/sqrl-text/src/main/java/com/datasqrl/text/Split.java b/sqrl-text/src/main/java/com/datasqrl/text/Split.java
deleted file mode 100644
index 8ccda26..0000000
--- a/sqrl-text/src/main/java/com/datasqrl/text/Split.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.text;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/**
- * Returns an array of substrings by splitting the input string based on the given delimiter. If the
- * delimiter is not found in the string, the original string is returned as the only element in the
- * array. If the delimiter is empty, the string is split into its individual characters. If the string or
- * delimiter is null, a null value is returned. If the delimiter is found at the beginning or end of
- * the string, or there are contiguous delimiters, then an empty string is added to the array.
- */
-@AutoService(StandardLibraryFunction.class)
-public class Split extends ScalarFunction implements StandardLibraryFunction {
-
- public String[] eval(String text, String delimiter) {
- if (text == null || delimiter == null) {
- return null;
- }
-
- if (delimiter.isEmpty()) {
- return text.split("");
- }
-
- return text.split(delimiter, -1);
- }
-}
diff --git a/sqrl-text/src/main/java/com/datasqrl/text/TextFunctions.java b/sqrl-text/src/main/java/com/datasqrl/text/TextFunctions.java
deleted file mode 100644
index f082b2a..0000000
--- a/sqrl-text/src/main/java/com/datasqrl/text/TextFunctions.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.text;
-
-public class TextFunctions {
-
- public static Split SPLIT = new Split();
- public static Format FORMAT = new Format();
- public static TextSearch TEXT_SEARCH = new TextSearch();
-}
diff --git a/sqrl-text/src/main/java/com/datasqrl/text/TextSearch.java b/sqrl-text/src/main/java/com/datasqrl/text/TextSearch.java
deleted file mode 100644
index 3bafdf2..0000000
--- a/sqrl-text/src/main/java/com/datasqrl/text/TextSearch.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.text;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.FlinkTypeUtil.VariableArguments;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.StringTokenizer;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/**
- * Returns a numeric score for how well the given query string matches the provided texts.
- * Returns 0 if there is no match. Use this function for full-text search.
- */
-@AutoService(StandardLibraryFunction.class)
-public class TextSearch extends ScalarFunction implements StandardLibraryFunction {
-
- public static void tokenizeTo(String text, Collection<String> collection) {
- StringTokenizer tokenizer = new StringTokenizer(text);
- while (tokenizer.hasMoreTokens()) {
- collection.add(tokenizer.nextToken().trim().toLowerCase());
- }
- }
-
- public Double eval(String query, String... texts) {
- if (query == null) {
- return null;
- }
- List<String> queryWords = new ArrayList<>();
- tokenizeTo(query, queryWords);
- if (queryWords.isEmpty()) {
- return 1.0;
- }
-
- Set<String> searchWords = new HashSet<>();
- Arrays.stream(texts).forEach(text -> tokenizeTo(text, searchWords));
-
- double score = 0;
- for (String queryWord : queryWords) {
- if (searchWords.contains(queryWord)) {
- score += 1.0;
- }
- }
- return score / queryWords.size();
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder()
- .inputTypeStrategy(
- VariableArguments.builder()
- .staticType(DataTypes.STRING())
- .variableType(DataTypes.STRING())
- .minVariableArguments(1)
- .maxVariableArguments(256)
- .build())
- .outputTypeStrategy(FlinkTypeUtil.nullPreservingOutputStrategy(DataTypes.DOUBLE()))
- .build();
- }
-}
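The score is the fraction of query tokens (whitespace-split, lower-cased) that appear anywhere in the combined token set of the text arguments. A worked sketch:

```java
public class TextSearchDemo {
  public static void main(String[] args) {
    TextSearch search = new TextSearch();

    // Query tokens [hello, world]: only "hello" appears in the text -> 1/2.
    System.out.println(search.eval("Hello World", "hello john")); // 0.5

    // Both query tokens appear somewhere across the texts -> 2/2.
    System.out.println(search.eval("Hello World", "world news", "hello again")); // 1.0
  }
}
```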
diff --git a/sqrl-text/src/main/resources/com/datasqrl/text/banned_words_list.txt b/sqrl-text/src/main/resources/com/datasqrl/text/banned_words_list.txt
deleted file mode 100644
index 79e0f69..0000000
--- a/sqrl-text/src/main/resources/com/datasqrl/text/banned_words_list.txt
+++ /dev/null
@@ -1,706 +0,0 @@
-====
- Copyright © 2024 DataSQRL (contact@datasqrl.com)
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-====
-
-a55
-a55hole
-aeolus
-ahole
-anal
-analprobe
-anilingus
-anus
-areola
-areole
-arian
-aryan
-ass
-assbang
-assbanged
-assbangs
-asses
-assfuck
-assfucker
-assh0le
-asshat
-assho1e
-ass hole
-assholes
-assmaster
-assmunch
-asswipe
-asswipes
-azazel
-azz
-b1tch
-ballsack
-banger
-barf
-bastard
-bastards
-bawdy
-beaner
-beardedclam
-beastiality
-beatch
-beater
-beeyotch
-beotch
-biatch
-bigtits
-big tits
-bimbo
-bitch
-bitched
-bitches
-bitchy
-blow job
-blowjob
-blowjobs
-bod
-bodily
-boink
-bollock
-bollocks
-bollok
-bone
-boned
-boner
-boners
-bong
-boob
-boobies
-boobs
-booby
-booger
-bookie
-bootee
-bootie
-booty
-booze
-boozer
-boozy
-bosom
-bosomy
-bowel
-bowels
-brassiere
-bugger
-bukkake
-bullshit
-bull shit
-bullshits
-bullshitted
-bullturds
-bung
-busty
-butt
-butt fuck
-buttfuck
-buttfucker
-buttplug
-c.0.c.k
-c.o.c.k.
-c.u.n.t
-c0ck
-c-0-c-k
-caca
-cahone
-cameltoe
-carpetmuncher
-cawk
-cervix
-chinc
-chincs
-chink
-chode
-chodes
-cl1t
-climax
-clit
-clitoris
-clitorus
-clits
-clitty
-cocain
-cocaine
-cock
-c-o-c-k
-cockblock
-cockholster
-cockknocker
-cocks
-cocksmoker
-cocksucker
-cock sucker
-coital
-commie
-condom
-coon
-coons
-corksucker
-crabs
-cracker
-crackwhore
-crap
-crappy
-cum
-cummin
-cumming
-cumshot
-cumshots
-cumslut
-cumstain
-cunilingus
-cunnilingus
-cunny
-cunt
-c-u-n-t
-cuntface
-cunthunter
-cuntlick
-cuntlicker
-cunts
-d0ng
-d0uch3
-d0uche
-d1ck
-d1ld0
-d1ldo
-dago
-dagos
-dammit
-damn
-damned
-damnit
-dawgie-style
-dick
-dickbag
-dickdipper
-dickface
-dickflipper
-dickhead
-dickheads
-dickish
-dick-ish
-dickripper
-dicksipper
-dickweed
-dickwhipper
-dickzipper
-diddle
-dike
-dildo
-dildos
-diligaf
-dillweed
-dimwit
-dingle
-dipship
-doggie-style
-doggy-style
-dong
-doofus
-doosh
-dopey
-douch3
-douche
-douchebag
-douchebags
-douchey
-drunk
-dumass
-dumbass
-dumbasses
-dummy
-dyke
-dykes
-ejaculate
-erect
-erection
-erotic
-essohbee
-extacy
-extasy
-f.u.c.k
-fack
-fag
-fagg
-fagged
-faggit
-faggot
-fagot
-fags
-faig
-faigt
-fannybandit
-fart
-fartknocker
-fat
-felch
-felcher
-felching
-fellate
-fellatio
-feltch
-feltcher
-fisted
-fisting
-fisty
-floozy
-foad
-fondle
-foobar
-freex
-frigg
-frigga
-fubar
-fuck
-f-u-c-k
-fuckass
-fucked
-fucker
-fuckface
-fuckin
-fucking
-fucknugget
-fucknut
-fuckoff
-fucks
-fucktard
-fuck-tard
-fuckup
-fuckwad
-fuckwit
-fudgepacker
-fuk
-fvck
-fxck
-gae
-gai
-ganja
-gfy
-ghay
-ghey
-gigolo
-glans
-goatse
-godamn
-godamnit
-goddam
-goddammit
-goddamn
-goldenshower
-gonad
-gonads
-gook
-gooks
-gringo
-gspot
-g-spot
-gtfo
-guido
-h0m0
-h0mo
-handjob
-hard on
-he11
-hebe
-heeb
-hell
-hemp
-heroin
-herp
-herpes
-herpy
-hitler
-hiv
-hobag
-hom0
-homey
-homo
-homoey
-honky
-hooch
-hookah
-hooker
-hoor
-hootch
-hooter
-hooters
-horny
-hump
-humped
-humping
-hussy
-hymen
-inbred
-incest
-injun
-j3rk0ff
-jackass
-jackhole
-jackoff
-jap
-japs
-jerk
-jerk0ff
-jerked
-jerkoff
-jism
-jiz
-jizm
-jizz
-jizzed
-junkie
-junky
-kike
-kikes
-kill
-kinky
-kkk
-klan
-knobend
-kooch
-kooches
-kootch
-kraut
-kyke
-labia
-lech
-leper
-lesbo
-lesbos
-lez
-lezbian
-lezbians
-lezbo
-lezbos
-lezzie
-lezzies
-lezzy
-lmao
-lmfao
-loin
-loins
-lusty
-mams
-massa
-masterbate
-masterbating
-masterbation
-masturbate
-masturbating
-masturbation
-maxi
-menses
-meth
-m-fucking
-mofo
-molest
-moolie
-moron
-motherfucka
-motherfucker
-motherfucking
-mtherfucker
-mthrfucker
-mthrfucking
-muff
-muffdiver
-murder
-muthafuckaz
-muthafucker
-mutherfucker
-mutherfucking
-muthrfucking
-nad
-nads
-naked
-napalm
-nappy
-nazi
-nazism
-negro
-nigga
-niggah
-niggas
-niggaz
-nigger
-niggers
-niggle
-niglet
-nimrod
-ninny
-nipple
-nooky
-nympho
-opiate
-opium
-oral
-orally
-organ
-orgasm
-orgasmic
-orgies
-orgy
-ovary
-ovum
-ovums
-p.u.s.s.y.
-paddy
-paki
-pastie
-pasty
-pcp
-pecker
-pedo
-pedophile
-pedophilia
-pedophiliac
-pee
-peepee
-penial
-penile
-penis
-perversion
-peyote
-phalli
-phallic
-phuck
-pillowbiter
-pimp
-pinko
-piss
-pissed
-pissoff
-piss-off
-pms
-polack
-pollock
-poon
-poontang
-porn
-porno
-pornography
-pot
-potty
-prick
-prig
-prostitute
-pube
-pubic
-pubis
-punkass
-punky
-puss
-pussies
-pussy
-pussypounder
-puto
-queaf
-queef
-queero
-quicky
-quim
-racy
-rape
-raped
-raper
-rapist
-raunch
-rectal
-rectum
-rectus
-reefer
-reetard
-reich
-retard
-retarded
-revue
-rimjob
-ritard
-rtard
-r-tard
-rum
-rump
-rumprammer
-ruski
-s.h.i.t.
-s.o.b.
-s0b
-sadism
-sadist
-scag
-scantily
-schizo
-schlong
-screw
-screwed
-scrog
-scrot
-scrote
-scrotum
-scrud
-scum
-seaman
-seamen
-semen
-sex
-sexual
-sh1t
-s-h-1-t
-shamedame
-shit
-s-h-i-t
-shite
-shiteater
-shitface
-shithead
-shithole
-shithouse
-shits
-shitt
-shitted
-shitter
-shitty
-shiz
-sissy
-skag
-skank
-slave
-sleaze
-sleazy
-slut
-slutdumper
-slutkiss
-sluts
-smegma
-smut
-smutty
-snatch
-sniper
-snuff
-s-o-b
-sodom
-souse
-soused
-sperm
-spic
-spick
-spik
-spiks
-spooge
-spunk
-stfu
-stiffy
-stoned
-strip
-stroke
-stupid
-suck
-sucked
-sucking
-sumofabiatch
-t1t
-tard
-tawdry
-teabagging
-teat
-terd
-teste
-testee
-testes
-testicle
-testis
-thrust
-thug
-tinkle
-tit
-titfuck
-titi
-tits
-tittiefucker
-titties
-titty
-tittyfuck
-tittyfucker
-toke
-toots
-tramp
-trashy
-tubgirl
-turd
-tush
-twat
-twats
-ugly
-undies
-unwed
-urinal
-urine
-uterus
-uzi
-vag
-vagina
-valium
-viagra
-virgin
-vixen
-voyeur
-vulgar
-vulva
-wad
-wang
-wank
-wanker
-wazoo
-wedgie
-weed
-weenie
-weewee
-weiner
-weirdo
-wench
-wetback
-wh0re
-wh0reface
-whitey
-whiz
-whoralicious
-whore
-whorealicious
-whored
-whoreface
-whorehopper
-whorehouse
-whores
-whoring
-wigger
-womb
-woody
-wop
-wtf
-x-rated
-xxx
-yeasty
-yobbo
-zoophile
\ No newline at end of file
diff --git a/sqrl-text/src/test/java/com/datasqrl/functions/text/StdTextLibraryTest.java b/sqrl-text/src/test/java/com/datasqrl/functions/text/StdTextLibraryTest.java
deleted file mode 100644
index 1571914..0000000
--- a/sqrl-text/src/test/java/com/datasqrl/functions/text/StdTextLibraryTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.functions.text;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import com.datasqrl.text.TextFunctions;
-import org.junit.jupiter.api.Test;
-
-public class StdTextLibraryTest {
-
- @Test
- public void testFormat() {
- String format = "Hello, %s";
- assertEquals("Hello, World", TextFunctions.FORMAT.eval(format, "World"));
- format = "Count: %s, %s, %s, %s";
- assertEquals("Count: 1, 2, 3, 4", TextFunctions.FORMAT.eval(format, "1", "2", "3", "4"));
- }
-
- @Test
- public void testSearch() {
- assertEquals(1.0 / 2, TextFunctions.TEXT_SEARCH.eval("Hello World", "hello john"));
- assertEquals(
- 1.0 / 2, TextFunctions.TEXT_SEARCH.eval("Hello World", "what a world we live in, john"));
- assertEquals(
- 1.0,
- TextFunctions.TEXT_SEARCH.eval("Hello World", "what a world we live in, john! Hello john"));
- assertEquals(
- 2.0 / 3,
- TextFunctions.TEXT_SEARCH.eval(
- "one two THREE", "we are counting", "one two four five six"));
- assertEquals(
- 1.0,
- TextFunctions.TEXT_SEARCH.eval(
- "one two THREE", "we are counting", "one two four five six", "three forty fiv"));
- assertEquals(
- 0,
- TextFunctions.TEXT_SEARCH.eval(
- "one two THREE", "what a world we live in, john!", " Hello john"));
- }
-}
diff --git a/sqrl-text/src/test/java/com/datasqrl/text/SplitTest.java b/sqrl-text/src/test/java/com/datasqrl/text/SplitTest.java
deleted file mode 100644
index b56e809..0000000
--- a/sqrl-text/src/test/java/com/datasqrl/text/SplitTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.text;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-import org.junit.jupiter.api.Test;
-
-public class SplitTest {
-
- private final Split splitFunction = new Split();
-
- // Test for "Returns an array of substrings by splitting the input string based on the given
- // delimiter."
- @Test
- public void testSplitWithDelimiter() {
- String[] result = splitFunction.eval("apple,banana,cherry", ",");
- assertArrayEquals(new String[] {"apple", "banana", "cherry"}, result);
- }
-
- // Test for "If the delimiter is not found in the string, the original string is returned as the
- // only element in the array."
- @Test
- public void testSplitWithNoDelimiterInString() {
- String[] result = splitFunction.eval("apple", ",");
- assertArrayEquals(new String[] {"apple"}, result);
- }
-
- // Test for "If the delimiter is empty, every character in the string is split."
- @Test
- public void testSplitWithEmptyDelimiter() {
- String[] result = splitFunction.eval("apple", "");
- assertArrayEquals(new String[] {"a", "p", "p", "l", "e"}, result);
- }
-
- // Test for "If the string is null, a null value is returned."
- @Test
- public void testSplitWithNullText() {
- String[] result = splitFunction.eval(null, ",");
- assertNull(result);
- }
-
- // Test for "If the delimiter is null, a null value is returned."
- @Test
- public void testSplitWithNullDelimiter() {
- String[] result = splitFunction.eval("apple,banana,cherry", null);
- assertNull(result);
- }
-
- // Test for "If the delimiter is found at the beginning of the string, an empty string is added to
- // the array."
- @Test
- public void testSplitWithDelimiterAtBeginning() {
- String[] result = splitFunction.eval(",apple,banana,cherry", ",");
- assertArrayEquals(new String[] {"", "apple", "banana", "cherry"}, result);
- }
-
- // Test for "If the delimiter is found at the end of the string, an empty string is added to the
- // array."
- @Test
- public void testSplitWithDelimiterAtEnd() {
- String[] result = splitFunction.eval("apple,banana,cherry,", ",");
- assertArrayEquals(new String[] {"apple", "banana", "cherry", ""}, result);
- }
-
- // Test for "If there are contiguous delimiters, then an empty string is added to the array."
- @Test
- public void testSplitWithContiguousDelimiters() {
- String[] result = splitFunction.eval("apple,,banana,cherry", ",");
- assertArrayEquals(new String[] {"apple", "", "banana", "cherry"}, result);
- }
-}
diff --git a/sqrl-time/pom.xml b/sqrl-time/pom.xml
deleted file mode 100644
index 0829068..0000000
--- a/sqrl-time/pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>com.datasqrl.flink</groupId>
- <artifactId>sqrl-flink-parent</artifactId>
- <version>0.1-SNAPSHOT</version>
- </parent>
-
- <artifactId>sqrl-time</artifactId>
-
- <description>Time functions for flink</description>
-
- <url>https://www.datasqrl.com/</url>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <developers>
- <developer>
- <name>Daniel Henneberger</name>
- <email>daniel@datasqrl.com</email>
- <url>https://github.com/henneberger</url>
- </developer>
- </developers>
-
- <scm>
- <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
- <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
- <tag>HEAD</tag>
- <url>https://github.com/DataSQRL/sqrl</url>
- </scm>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-table-api-java-bridge</artifactId>
- <version>${flink.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>sqrl-lib-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- </dependencies>
-</project>
diff --git a/sqrl-time/readme.md b/sqrl-time/readme.md
deleted file mode 100644
index efca62a..0000000
--- a/sqrl-time/readme.md
+++ /dev/null
@@ -1,16 +0,0 @@
-| Function Documentation |
-|-------------------------|
-| `AtZone (timestamp with local time zone, string) → timestamp with local time zone` Convert a timestamp to a given time zone. Example: `AtZone('2021-01-01T00:00:00Z'::timestamptz, 'Europe/London') → 2021-01-01T00:00:00+00:00` |
-| `EndOfDay (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the day for a given timestamp. Optional parameters: `multiple`, the window width as a multiple of days, and `offset`, which shifts the window boundary. Example: `EndOfDay('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-01-01T23:59:59.999999999Z` |
-| `EndOfHour (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the hour for a given timestamp. Optional parameters: `multiple` and `offset` adjust the time calculations accordingly. Example: `EndOfHour('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-01-01T00:59:59.999999999Z` |
-| `EndOfMinute (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the minute for a given timestamp. Optional parameters: `multiple` and `offset` adjust the time calculations accordingly. Example: `EndOfMinute('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-01-01T00:00:59.999999999Z` |
-| `EndOfMonth (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the month for a given timestamp. Optional parameters: `multiple` and `offset` adjust the time calculations accordingly. Example: `EndOfMonth('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-01-31T23:59:59.999999999Z` |
-| `EndOfSecond (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the second for a given timestamp. Optional parameters: `multiple` and `offset` adjust the time calculations accordingly. Example: `EndOfSecond('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-01-01T00:00:00.999999999Z` |
-| `EndOfWeek (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the week for a given timestamp. Optional parameters: `multiple` and `offset` adjust the time calculations accordingly. Example: `EndOfWeek('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-01-03T23:59:59.999999999Z` |
-| `EndOfYear (timestamp with local time zone [, bigint, bigint]) → timestamp with local time zone` Get the end of the year for a given timestamp. Optional parameters: `multiple` and `offset` adjust the time calculations accordingly. Example: `EndOfYear('2021-01-01T00:00:00Z'::timestamptz, 1, 0) → 2021-12-31T23:59:59.999999999Z` |
-| `EpochMilliToTimestamp (bigint) → timestamp with local time zone` Convert epoch milliseconds to a timestamp. Example: `EpochMilliToTimestamp(1610000000000::bigint) → 2021-01-07T06:13:20Z` |
-| `EpochToTimestamp (bigint) → timestamp with local time zone` Convert epoch seconds to a timestamp. Example: `EpochToTimestamp(1610000000::bigint) → 2021-01-07T06:13:20Z` |
-| `ParseTimestamp (string [, string]) → timestamp with local time zone` Parse a string to a timestamp using an optional format. If no format is specified, the ISO-8601 format is used by default. Example: `ParseTimestamp('2021-01-01T00:00:00Z', 'yyyy-MM-dd''T''HH:mm:ssXXX') → 2021-01-01T00:00:00Z` |
-| `TimestampToEpoch (timestamp with local time zone) → bigint` Convert a timestamp to epoch seconds. Example: `TimestampToEpoch('2021-01-01T00:00:00Z'::timestamptz) → 1609459200` |
-| `TimestampToEpochMilli (timestamp with local time zone) → bigint` Convert a timestamp to epoch milliseconds. Example: `TimestampToEpochMilli('2021-01-01T00:00:00Z'::timestamptz) → 1609459200000` |
-| `TimestampToString (timestamp with local time zone) → string` Convert a timestamp to a string representation. Example: `TimestampToString('2021-01-01T00:00:00Z'::timestamptz) → '2021-01-01T00:00:00Z'` |
\ No newline at end of file
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/AbstractEpochToTimestamp.java b/sqrl-time/src/main/java/com/datasqrl/time/AbstractEpochToTimestamp.java
deleted file mode 100644
index fc72a73..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/AbstractEpochToTimestamp.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.StandardLibraryFunction;
-import java.time.Instant;
-import lombok.AllArgsConstructor;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-@AllArgsConstructor
-public class AbstractEpochToTimestamp extends ScalarFunction implements StandardLibraryFunction {
-
- boolean isMilli;
-
- public Instant eval(Long l) {
- if (isMilli) {
- return Instant.ofEpochMilli(l.longValue());
- } else {
- return Instant.ofEpochSecond(l.longValue());
- }
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return FlinkTypeUtil.basicNullInference(
- DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3), DataTypes.BIGINT());
- }
- //
- // @Override
- // public String getDocumentation() {
- // Instant inst = DEFAULT_DOC_TIMESTAMP.truncatedTo(ChronoUnit.SECONDS);
- // long epoch = inst.toEpochMilli() / (isMilli ? 1 : 1000);
- // String functionCall = String.format("%s(%s)", getFunctionName(), epoch);
- // String result = this.eval(epoch).toString();
- // return String.format(
- // "Converts the epoch timestamp in %s to the corresponding timestamp. "
- // + "E.g. `%s` returns the timestamp `%s`",
- // isMilli ? "milliseconds" : "seconds", functionCall, result);
- // }
-
-}
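The concrete `EpochToTimestamp` and `EpochMilliToTimestamp` functions (not shown in this hunk) presumably differ only in the `isMilli` flag they pass to this base class. A sketch of the conversion, using anonymous subclasses in place of the concrete classes:

```java
import java.time.Instant;

public class EpochToTimestampDemo {
  public static void main(String[] args) {
    // isMilli = false: interpret the input as seconds since epoch.
    Instant fromSeconds = new AbstractEpochToTimestamp(false) {}.eval(1610000000L);
    // isMilli = true: interpret the input as milliseconds since epoch.
    Instant fromMillis = new AbstractEpochToTimestamp(true) {}.eval(1610000000000L);

    System.out.println(fromSeconds); // 2021-01-07T06:13:20Z
    System.out.println(fromMillis);  // 2021-01-07T06:13:20Z
  }
}
```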
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/AbstractTimestampToEpoch.java b/sqrl-time/src/main/java/com/datasqrl/time/AbstractTimestampToEpoch.java
deleted file mode 100644
index 0310183..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/AbstractTimestampToEpoch.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.StandardLibraryFunction;
-import java.time.Instant;
-import lombok.AllArgsConstructor;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-@AllArgsConstructor
-public abstract class AbstractTimestampToEpoch extends ScalarFunction
- implements StandardLibraryFunction {
-
- private final boolean isMilli;
-
- public Long eval(Instant instant) {
- long epoch = instant.toEpochMilli();
- if (!isMilli) {
- epoch = epoch / 1000;
- }
- return epoch;
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return FlinkTypeUtil.basicNullInference(
- DataTypes.BIGINT(), DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3));
- }
- //
- // @Override
- // public String getDocumentation() {
- // Instant DEFAULT_DOC_TIMESTAMP = Instant.parse("2023-03-12T18:23:34.083Z");
- // String functionCall = String.format("%s(%s(%s))",
- // getFunctionName(),
- // STRING_TO_TIMESTAMP.getFunctionName(),
- // DEFAULT_DOC_TIMESTAMP.toString());
- // String result = this.eval(DEFAULT_DOC_TIMESTAMP).toString();
- // return String.format("Returns the %s since epoch for the given timestamp. E.g. `%s`
- // returns the number `%s`",
- // isMilli?"milliseconds":"seconds",
- // functionCall, result);
- // }
-
-}
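The inverse direction: milliseconds are divided down to seconds when `isMilli` is false. A sketch, again with an anonymous subclass standing in for the concrete functions:

```java
import java.time.Instant;

public class TimestampToEpochDemo {
  public static void main(String[] args) {
    Instant ts = Instant.parse("2021-01-01T00:00:00Z");
    Long seconds = new AbstractTimestampToEpoch(false) {}.eval(ts);
    Long millis = new AbstractTimestampToEpoch(true) {}.eval(ts);
    System.out.println(seconds); // 1609459200
    System.out.println(millis);  // 1609459200000
  }
}
```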
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/AtZone.java b/sqrl-time/src/main/java/com/datasqrl/time/AtZone.java
deleted file mode 100644
index 2cd5150..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/AtZone.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.Instant;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.util.Optional;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/** Returns the timestamp at the given timezone. */
-@AutoService(StandardLibraryFunction.class)
-public class AtZone extends ScalarFunction implements StandardLibraryFunction {
-
- public ZonedDateTime eval(Instant instant, String zoneId) {
- return instant.atZone(ZoneId.of(zoneId));
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder()
- .typedArguments(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3), DataTypes.STRING())
- .outputTypeStrategy(
- callContext -> {
- DataType type = FlinkTypeUtil.getFirstArgumentType(callContext);
- if (type.getLogicalType().isNullable()) {
- return Optional.of(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3));
- }
-
- return Optional.of(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).notNull());
- })
- .build();
- }
-}
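The function changes only the rendering zone, not the instant itself. Usage sketch:

```java
import java.time.Instant;

public class AtZoneDemo {
  public static void main(String[] args) {
    AtZone atZone = new AtZone();
    // Same point in time, rendered in the target zone.
    System.out.println(atZone.eval(Instant.parse("2021-01-01T00:00:00Z"), "America/Los_Angeles"));
    // 2020-12-31T16:00-08:00[America/Los_Angeles]
  }
}
```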
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfDay.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfDay.java
deleted file mode 100644
index c9792f4..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfDay.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.temporal.ChronoUnit;
-
-/**
- * Time window function that returns the end of day for the timestamp argument. E.g.
- * endOfDay(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the timestamp
- * 2023-03-12T23:59:59.999999999Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfDay extends TimeTumbleWindowFunction {
-
- public EndOfDay() {
- super(ChronoUnit.DAYS, ChronoUnit.HOURS);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfHour.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfHour.java
deleted file mode 100644
index 627638f..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfHour.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.temporal.ChronoUnit;
-
-/**
- * Time window function that returns the end of hour for the timestamp argument. E.g.
- * endOfHour(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the timestamp
- * 2023-03-12T18:59:59.999999999Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfHour extends TimeTumbleWindowFunction {
-
- public EndOfHour() {
- super(ChronoUnit.HOURS, ChronoUnit.MINUTES);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfMinute.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfMinute.java
deleted file mode 100644
index f39dd14..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfMinute.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.temporal.ChronoUnit;
-
-/**
- * Time window function that returns the end of minute for the timestamp argument. E.g.
- * endOfMinute(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the timestamp
- * 2023-03-12T18:23:59.999999999Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfMinute extends TimeTumbleWindowFunction {
-
- public EndOfMinute() {
- super(ChronoUnit.MINUTES, ChronoUnit.SECONDS);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfMonth.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfMonth.java
deleted file mode 100644
index 7b6d65f..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfMonth.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
-import java.time.temporal.ChronoUnit;
-import java.time.temporal.TemporalAdjusters;
-
-/**
- * Time window function that returns the end of month for the timestamp argument. E.g.
- * endOfMonth(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the timestamp
- * 2023-03-31T23:59:59.999999999Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfMonth extends TimeTumbleWindowFunction {
-
- public EndOfMonth() {
- super(ChronoUnit.MONTHS, ChronoUnit.DAYS);
- }
-
- public Instant eval(Instant instant, Long multiple, Long offset) {
- if (multiple == null) {
- multiple = 1L;
- }
- // Preconditions.checkArgument(multiple == 1,
- // "Time window width must be 1. Use endofDay instead for flexible window widths.");
- if (offset == null) {
- offset = 0L;
- }
- // Preconditions.checkArgument(offset >= 0 && offset <= 28, "Invalid offset in days: %s",
- // offset);
-
- ZonedDateTime time =
- ZonedDateTime.ofInstant(instant, ZoneOffset.UTC).truncatedTo(ChronoUnit.DAYS);
- if (time.getDayOfMonth() > offset) {
- time = time.with(TemporalAdjusters.firstDayOfNextMonth());
- } else {
- time = time.with(TemporalAdjusters.firstDayOfMonth());
- }
- time = time.plusDays(offset);
- return time.minusNanos(1).toInstant();
- }
-}
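With the precondition checks commented out, the offset semantics are easiest to see by example: for `offset` = d, window boundaries fall at the end of day d of each month, so a timestamp on or before day d maps to this month's boundary and anything later rolls over to the next month. Worked sketch:

```java
import java.time.Instant;

public class EndOfMonthDemo {
  public static void main(String[] args) {
    EndOfMonth endOfMonth = new EndOfMonth();

    // Default: end of the calendar month (offset 0).
    System.out.println(endOfMonth.eval(Instant.parse("2023-03-12T18:23:34.083Z"), 1L, 0L));
    // 2023-03-31T23:59:59.999999999Z

    // offset = 5: day 3 <= 5, so the boundary is the end of March 5.
    System.out.println(endOfMonth.eval(Instant.parse("2023-03-03T00:00:00Z"), 1L, 5L));
    // 2023-03-05T23:59:59.999999999Z

    // offset = 5: day 12 > 5, so the boundary rolls over to April 5.
    System.out.println(endOfMonth.eval(Instant.parse("2023-03-12T00:00:00Z"), 1L, 5L));
    // 2023-04-05T23:59:59.999999999Z
  }
}
```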
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfSecond.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfSecond.java
deleted file mode 100644
index 0941b51..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfSecond.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.temporal.ChronoUnit;
-
-/**
- * Time window function that returns the end of second for the timestamp argument. E.g.
- * endOfSecond(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the timestamp
- * 2023-03-12T18:23:34.999999999Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfSecond extends TimeTumbleWindowFunction {
-
- public EndOfSecond() {
- super(ChronoUnit.SECONDS, ChronoUnit.MILLIS);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfWeek.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfWeek.java
deleted file mode 100644
index 2ec4b04..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfWeek.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
-import java.time.temporal.ChronoUnit;
-
-/**
- * Time window function that returns the end of week for the timestamp argument, where weeks run
- * from Monday to Sunday. E.g. endOfWeek(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the
- * timestamp 2023-03-12T23:59:59.999999999Z (a Sunday evening)
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfWeek extends TimeTumbleWindowFunction {
-
- public EndOfWeek() {
- super(ChronoUnit.WEEKS, ChronoUnit.DAYS);
- }
-
- @Override
- public Instant eval(Instant instant, Long multiple, Long offset) {
- if (multiple == null) {
- multiple = 1L;
- }
- // Preconditions.checkArgument(multiple == 1,
- // "Time window width must be 1. Use endofDay instead for flexible window widths.");
- if (offset == null) {
- offset = 0L;
- }
- // Preconditions.checkArgument(offset >= 0 && offset <= 6, "Invalid offset in days: %s",
- // offset);
-
- ZonedDateTime time = ZonedDateTime.ofInstant(instant, ZoneOffset.UTC);
- int daysToSubtract = time.getDayOfWeek().getValue() - 1 - offset.intValue();
- if (daysToSubtract < 0) {
- daysToSubtract = 7 + daysToSubtract;
- }
- return ZonedDateTime.ofInstant(instant, ZoneOffset.UTC)
- .truncatedTo(ChronoUnit.DAYS)
- .minusDays(daysToSubtract)
- .plus(1, timeUnit)
- .minusNanos(1)
- .toInstant();
- }
-}
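
The day-of-week arithmetic is the subtle part: getDayOfWeek().getValue() numbers Monday as 1 through Sunday as 7, so with offset = 0 the week runs Monday through Sunday. A standalone sketch (not part of the diff) of the zero-offset case:

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

public class EndOfWeekSketch {
  // Mirrors EndOfWeek.eval with offset = 0: weeks run Monday..Sunday in UTC.
  static Instant endOfWeek(Instant instant) {
    ZonedDateTime time = ZonedDateTime.ofInstant(instant, ZoneOffset.UTC);
    int daysToSubtract = time.getDayOfWeek().getValue() - 1; // Monday = 1 ... Sunday = 7
    return time.truncatedTo(ChronoUnit.DAYS)
        .minusDays(daysToSubtract) // back to Monday 00:00
        .plusWeeks(1)              // forward to next Monday 00:00
        .minusNanos(1)             // last nanosecond of the Sunday
        .toInstant();
  }

  public static void main(String[] args) {
    // 2023-03-12 is a Sunday, so it is already the last day of its week.
    System.out.println(endOfWeek(Instant.parse("2023-03-12T18:23:34.083Z")));
    // -> 2023-03-12T23:59:59.999999999Z
  }
}
```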
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EndOfYear.java b/sqrl-time/src/main/java/com/datasqrl/time/EndOfYear.java
deleted file mode 100644
index f67bdd0..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EndOfYear.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
-import java.time.temporal.ChronoUnit;
-import java.time.temporal.TemporalAdjusters;
-
-/**
- * Time window function that returns the end of year for the timestamp argument. E.g.
- * endOfYear(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the timestamp
- * 2023-12-31T23:59:59.999999999Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EndOfYear extends TimeTumbleWindowFunction {
-
- public EndOfYear() {
- super(ChronoUnit.YEARS, ChronoUnit.DAYS);
- }
-
- public Instant eval(Instant instant) {
- return ZonedDateTime.ofInstant(instant, ZoneOffset.UTC)
- .with(TemporalAdjusters.firstDayOfNextYear())
- .truncatedTo(ChronoUnit.DAYS)
- .minusNanos(1)
- .toInstant();
- }
-
- public Instant eval(Instant instant, Long multiple, Long offset) {
- if (multiple == null) {
- multiple = 1L;
- }
- // Preconditions.checkArgument(multiple > 0 && multiple < Integer.MAX_VALUE,
- // "Window width must be a positive integer value: %s", multiple);
- if (offset == null) {
- offset = 0L;
- }
- // Preconditions.checkArgument(offset >= 0 && offset < 365, "Invalid offset in days: %s",
- // offset);
-
- ZonedDateTime time =
- ZonedDateTime.ofInstant(instant, ZoneOffset.UTC).truncatedTo(ChronoUnit.DAYS);
- if (time.getDayOfYear() > offset) {
- time = time.with(TemporalAdjusters.firstDayOfNextYear());
- } else {
- time = time.with(TemporalAdjusters.firstDayOfYear());
- }
- int modulus = multiple.intValue();
- int yearsToAdd = (modulus - time.getYear() % modulus) % modulus;
-
- time = time.plusYears(yearsToAdd).plusDays(offset);
- return time.minusNanos(1).toInstant();
- }
-}
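
With multiple > 1 the function rounds up to the next window boundary, where boundaries fall on years divisible by multiple. For example, eval(2023-03-12T18:23:34.083Z, 5, 0) first moves to 2024-01-01, then adds (5 - 2024 % 5) % 5 = 1 year to reach 2025-01-01, and returns 2024-12-31T23:59:59.999999999Z. A standalone sketch (not part of the diff) of the same arithmetic:

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAdjusters;

public class EndOfYearSketch {
  // Mirrors EndOfYear.eval(instant, multiple, offset): window boundaries fall on
  // years divisible by `multiple`, shifted by `offset` days.
  static Instant endOfYear(Instant instant, long multiple, long offset) {
    ZonedDateTime time =
        ZonedDateTime.ofInstant(instant, ZoneOffset.UTC).truncatedTo(ChronoUnit.DAYS);
    time = time.getDayOfYear() > offset
        ? time.with(TemporalAdjusters.firstDayOfNextYear())
        : time.with(TemporalAdjusters.firstDayOfYear());
    int modulus = (int) multiple;
    int yearsToAdd = (modulus - time.getYear() % modulus) % modulus;
    return time.plusYears(yearsToAdd).plusDays(offset).minusNanos(1).toInstant();
  }

  public static void main(String[] args) {
    Instant in = Instant.parse("2023-03-12T18:23:34.083Z");
    System.out.println(endOfYear(in, 1, 0)); // 2023-12-31T23:59:59.999999999Z
    System.out.println(endOfYear(in, 5, 0)); // 2024-12-31T23:59:59.999999999Z (2025 is divisible by 5)
  }
}
```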
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EpochMilliToTimestamp.java b/sqrl-time/src/main/java/com/datasqrl/time/EpochMilliToTimestamp.java
deleted file mode 100644
index 1efe285..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EpochMilliToTimestamp.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-
-/**
- * Converts the epoch timestamp in milliseconds to the corresponding timestamp. E.g.
- * epochMilliToTimestamp(1678645414000) returns the timestamp 2023-03-12T18:23:34Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EpochMilliToTimestamp extends AbstractEpochToTimestamp {
-
- public EpochMilliToTimestamp() {
- super(true);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/EpochToTimestamp.java b/sqrl-time/src/main/java/com/datasqrl/time/EpochToTimestamp.java
deleted file mode 100644
index 8db0d5b..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/EpochToTimestamp.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-
-/**
- * Converts the epoch timestamp in seconds to the corresponding timestamp. E.g.
- * epochToTimestamp(1678645414) returns the timestamp 2023-03-12T18:23:34Z
- */
-@AutoService(StandardLibraryFunction.class)
-public class EpochToTimestamp extends AbstractEpochToTimestamp {
-
- public EpochToTimestamp() {
- super(false);
- }
-}
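
The two epoch converters differ only in the boolean passed to AbstractEpochToTimestamp (the base class is not shown in this diff); presumably the flag selects milliseconds versus seconds. The equivalent java.time conversions, as a sketch:

```java
import java.time.Instant;

public class EpochConversionSketch {
  public static void main(String[] args) {
    // epochToTimestamp(1678645414): seconds since epoch -> timestamp
    System.out.println(Instant.ofEpochSecond(1678645414L)); // 2023-03-12T18:23:34Z
    // epochMilliToTimestamp(1678645414000): milliseconds since epoch -> timestamp
    System.out.println(Instant.ofEpochMilli(1678645414000L)); // 2023-03-12T18:23:34Z
  }
}
```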
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/ParseTimestamp.java b/sqrl-time/src/main/java/com/datasqrl/time/ParseTimestamp.java
deleted file mode 100644
index 0deb78c..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/ParseTimestamp.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.FlinkTypeUtil.VariableArguments;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.time.format.DateTimeFormatter;
-import java.util.Locale;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/** Parses a timestamp from an ISO timestamp string. */
-@Slf4j
-@AutoService(StandardLibraryFunction.class)
-public class ParseTimestamp extends ScalarFunction implements StandardLibraryFunction {
-
- public Instant eval(String s) {
- return Instant.parse(s);
- }
-
- public Instant eval(String s, String format) {
- DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format, Locale.US);
- try {
- return LocalDateTime.parse(s, formatter).atZone(ZoneId.systemDefault()).toInstant();
- } catch (Exception e) {
- log.warn(e.getMessage());
- return null;
- }
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder()
- .inputTypeStrategy(
- VariableArguments.builder()
- .staticType(DataTypes.STRING())
- .variableType(DataTypes.STRING())
- .minVariableArguments(0)
- .maxVariableArguments(1)
- .build())
- .outputTypeStrategy(
- FlinkTypeUtil.nullPreservingOutputStrategy(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)))
- .build();
- }
-}
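
Note the asymmetry in error handling: the single-argument overload throws on malformed input (Instant.parse), while the two-argument overload logs a warning and returns null. A short usage sketch, assuming the class above is on the classpath:

```java
import java.time.Instant;

public class ParseTimestampSketch {
  public static void main(String[] args) {
    ParseTimestamp parse = new ParseTimestamp();
    Instant a = parse.eval("2023-03-12T18:23:34.083Z"); // strict ISO-8601; throws if malformed
    Instant b = parse.eval("12/03/2023 18:23:34", "dd/MM/yyyy HH:mm:ss"); // pattern parse in the system zone
    Instant c = parse.eval("not a date", "yyyy-MM-dd"); // logs a warning and returns null
    System.out.println(a + " / " + b + " / " + c);
  }
}
```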
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/TimeFunctions.java b/sqrl-time/src/main/java/com/datasqrl/time/TimeFunctions.java
deleted file mode 100644
index 8d78cc1..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/TimeFunctions.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-public class TimeFunctions {
- public static final EpochToTimestamp EPOCH_TO_TIMESTAMP = new EpochToTimestamp();
- public static final EpochMilliToTimestamp EPOCH_MILLI_TO_TIMESTAMP = new EpochMilliToTimestamp();
- public static final TimestampToEpoch TIMESTAMP_TO_EPOCH = new TimestampToEpoch();
- public static final TimestampToEpochMilli TIMESTAMP_TO_EPOCH_MILLI = new TimestampToEpochMilli();
- public static final ParseTimestamp STRING_TO_TIMESTAMP = new ParseTimestamp();
- public static final TimestampToString TIMESTAMP_TO_STRING = new TimestampToString();
- public static final AtZone AT_ZONE = new AtZone();
- public static final EndOfSecond END_OF_SECOND = new EndOfSecond();
- public static final EndOfMinute END_OF_MINUTE = new EndOfMinute();
- public static final EndOfHour END_OF_HOUR = new EndOfHour();
- public static final EndOfDay END_OF_DAY = new EndOfDay();
- public static final EndOfWeek END_OF_WEEK = new EndOfWeek();
- public static final EndOfMonth END_OF_MONTH = new EndOfMonth();
- public static final EndOfYear END_OF_YEAR = new EndOfYear();
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/TimeTumbleWindowFunction.java b/sqrl-time/src/main/java/com/datasqrl/time/TimeTumbleWindowFunction.java
deleted file mode 100644
index 0c449f9..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/TimeTumbleWindowFunction.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.FlinkTypeUtil.VariableArguments;
-import com.datasqrl.function.StandardLibraryFunction;
-import java.time.Duration;
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
-import java.time.temporal.ChronoUnit;
-import java.time.temporal.TemporalAdjusters;
-import lombok.AllArgsConstructor;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-@AllArgsConstructor
-public abstract class TimeTumbleWindowFunction extends ScalarFunction
- implements TimeTumbleWindowFunctionEval, StandardLibraryFunction {
-
- protected final ChronoUnit timeUnit;
- protected final ChronoUnit offsetUnit;
-
- public ChronoUnit getTimeUnit() {
- return timeUnit;
- }
-
- public ChronoUnit getOffsetUnit() {
- return offsetUnit;
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return TypeInference.newBuilder()
- .inputTypeStrategy(
- VariableArguments.builder()
- .staticType(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))
- .variableType(DataTypes.BIGINT())
- .minVariableArguments(0)
- .maxVariableArguments(2)
- .build())
- .outputTypeStrategy(
- FlinkTypeUtil.nullPreservingOutputStrategy(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)))
- .build();
- }
-
- @Override
- public Instant eval(Instant instant, Long multiple, Long offset) {
- if (multiple == null) {
- multiple = 1L;
- }
- // Preconditions.checkArgument(multiple > 0, "Window width must be positive: %s", multiple);
- if (offset == null) {
- offset = 0L;
- }
- // Preconditions.checkArgument(offset >= 0, "Invalid window offset: %s", offset);
- // Preconditions.checkArgument(
- // offsetUnit.getDuration().multipliedBy(offset).compareTo(timeUnit.getDuration()) < 0,
- // "Offset of %s %s is larger than %s", offset, offsetUnit, timeUnit);
-
- ZonedDateTime time = ZonedDateTime.ofInstant(instant, ZoneOffset.UTC);
- ZonedDateTime truncated = time.minus(offset, offsetUnit).truncatedTo(timeUnit);
-
- long multipleToAdd = 1;
- if (multiple > 1) {
- ZonedDateTime truncatedBase =
- truncated.with(TemporalAdjusters.firstDayOfYear()).truncatedTo(ChronoUnit.DAYS);
- ZonedDateTime timeBase =
- time.with(TemporalAdjusters.firstDayOfYear()).truncatedTo(ChronoUnit.DAYS);
- if (!timeBase.equals(truncatedBase)) {
- // We slipped into the prior base unit (i.e. year) due to offset.
- return timeBase.plus(offset, offsetUnit).minusNanos(1).toInstant();
- }
- Duration timeToBase = Duration.between(truncatedBase, truncated);
- long numberToBase = timeToBase.dividedBy(timeUnit.getDuration());
- multipleToAdd = multiple - (numberToBase % multiple);
- }
-
- return truncated
- .plus(multipleToAdd, timeUnit)
- .plus(offset, offsetUnit)
- .minusNanos(1)
- .toInstant();
- }
-}
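
The multiple > 1 branch aligns window boundaries to the start of the year: it counts how many timeUnits separate the truncated timestamp from January 1st and rounds up to the next multiple. A worked sketch (not part of the diff) for a hypothetical endOfHour(t, 4, 0), i.e. 4-hour windows:

```java
import java.time.Duration;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

public class TumbleWindowSketch {
  public static void main(String[] args) {
    ZonedDateTime time = ZonedDateTime.parse("2023-03-12T18:23:34Z");
    ZonedDateTime truncated = time.truncatedTo(ChronoUnit.HOURS);                 // 18:00
    ZonedDateTime base = truncated.withDayOfYear(1).truncatedTo(ChronoUnit.DAYS); // 2023-01-01T00:00
    long hoursToBase = Duration.between(base, truncated).toHours();               // 1698
    long multipleToAdd = 4 - (hoursToBase % 4); // 1698 % 4 = 2, so add 2 hours
    System.out.println(truncated.plusHours(multipleToAdd).minusNanos(1).toInstant());
    // -> 2023-03-12T19:59:59.999999999Z: the 4-hour window [16:00, 20:00) ends here
  }
}
```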
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/TimeTumbleWindowFunctionEval.java b/sqrl-time/src/main/java/com/datasqrl/time/TimeTumbleWindowFunctionEval.java
deleted file mode 100644
index 4b27852..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/TimeTumbleWindowFunctionEval.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import java.time.Instant;
-
-public interface TimeTumbleWindowFunctionEval {
-
- Instant eval(Instant instant, Long multiple, Long offset);
-
- default Instant eval(Instant instant, Long multiple) {
- return eval(instant, multiple, 0L);
- }
-
- default Instant eval(Instant instant) {
- return eval(instant, 1L);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/TimestampToEpoch.java b/sqrl-time/src/main/java/com/datasqrl/time/TimestampToEpoch.java
deleted file mode 100644
index c1d2b0c..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/TimestampToEpoch.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-
-/**
- * Returns the seconds since epoch for the given timestamp. E.g.
- * timestampToEpoch(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the number 1678645414
- */
-@AutoService(StandardLibraryFunction.class)
-public class TimestampToEpoch extends AbstractTimestampToEpoch {
-
- public TimestampToEpoch() {
- super(false);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/TimestampToEpochMilli.java b/sqrl-time/src/main/java/com/datasqrl/time/TimestampToEpochMilli.java
deleted file mode 100644
index 40ab47a..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/TimestampToEpochMilli.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-
-/**
- * Returns the milliseconds since epoch for the given timestamp. E.g.
- * timestampToEpochMilli(parseTimestamp(2023-03-12T18:23:34.083Z)) returns the number
- * 1678645414083
- */
-@AutoService(StandardLibraryFunction.class)
-public class TimestampToEpochMilli extends AbstractTimestampToEpoch {
-
- public TimestampToEpochMilli() {
- super(true);
- }
-}
diff --git a/sqrl-time/src/main/java/com/datasqrl/time/TimestampToString.java b/sqrl-time/src/main/java/com/datasqrl/time/TimestampToString.java
deleted file mode 100644
index 2955539..0000000
--- a/sqrl-time/src/main/java/com/datasqrl/time/TimestampToString.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.time;
-
-import com.datasqrl.function.FlinkTypeUtil;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import java.time.Instant;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.catalog.DataTypeFactory;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.table.types.inference.TypeInference;
-
-/** Converts the timestamp to an ISO timestamp string */
-@AutoService(StandardLibraryFunction.class)
-public class TimestampToString extends ScalarFunction implements StandardLibraryFunction {
-
- public String eval(Instant instant) {
- return instant.toString();
- }
-
- @Override
- public TypeInference getTypeInference(DataTypeFactory typeFactory) {
- return FlinkTypeUtil.basicNullInference(
- DataTypes.STRING(), DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3));
- }
-}
diff --git a/sqrl-vector/pom.xml b/sqrl-vector/pom.xml
deleted file mode 100644
index 1b7e3a4..0000000
--- a/sqrl-vector/pom.xml
+++ /dev/null
@@ -1,115 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>com.datasqrl.flink</groupId>
-    <artifactId>sqrl-flink-parent</artifactId>
-    <version>0.1-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>sqrl-vector</artifactId>
-
-  <description>Vector functions for flink</description>
-
-  <url>https://www.datasqrl.com/</url>
-  <licenses>
-    <license>
-      <name>The Apache Software License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-  <developers>
-    <developer>
-      <name>Daniel Henneberger</name>
-      <email>daniel@datasqrl.com</email>
-      <url>https://github.com/henneberger</url>
-    </developer>
-  </developers>
-  <scm>
-    <connection>scm:git:https://github.com/DataSQRL/sqrl.git</connection>
-    <developerConnection>scm:git:https://github.com/DataSQRL/sqrl.git</developerConnection>
-    <tag>HEAD</tag>
-    <url>https://github.com/DataSQRL/sqrl</url>
-  </scm>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-table-api-java-bridge</artifactId>
-      <version>${flink.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.postgresql</groupId>
-      <artifactId>postgresql</artifactId>
-      <version>${postgres.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>33.4.6-jre</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-connector-jdbc</artifactId>
-      <version>3.2.0-1.19</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-lib-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>sqrl-jdbc-1.16</artifactId>
-      <version>${project.version}</version>
-      <scope>provided</scope>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <phase>package</phase>
-            <configuration>
-              <relocations>
-                <relocation>
-                  <pattern>com.google.common</pattern>
-                  <shadedPattern>com.datasqrl.com.google.common</shadedPattern>
-                </relocation>
-              </relocations>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/AsciiTextTestEmbed.java b/sqrl-vector/src/main/java/com/datasqrl/vector/AsciiTextTestEmbed.java
deleted file mode 100644
index 52960ef..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/AsciiTextTestEmbed.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/** A trivial embedding function that counts occurrences of each character (modulo 256). Used for testing only. */
-@AutoService(StandardLibraryFunction.class)
-public class AsciiTextTestEmbed extends ScalarFunction implements StandardLibraryFunction {
-
- private static final int VECTOR_LENGTH = 256;
-
- public FlinkVectorType eval(String text) {
- double[] vector = new double[256];
- for (char c : text.toCharArray()) {
- vector[c % VECTOR_LENGTH] += 1;
- }
- return new FlinkVectorType(vector);
- }
-}
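
A quick sanity check of the counting behavior, as a sketch (assuming the classes from this diff on the classpath):

```java
public class AsciiTextTestEmbedSketch {
  public static void main(String[] args) {
    // Each character bumps vector[c % 256], so the result is a character histogram.
    double[] counts = new AsciiTextTestEmbed().eval("hello").getValue();
    System.out.println(counts['l']); // 2.0 ('l' occurs twice)
    System.out.println(counts['h']); // 1.0
  }
}
```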
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/Center.java b/sqrl-vector/src/main/java/com/datasqrl/vector/Center.java
deleted file mode 100644
index 779b309..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/Center.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import static com.datasqrl.vector.VectorFunctions.VEC_TO_DOUBLE;
-import static com.datasqrl.vector.VectorFunctions.convert;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.AggregateFunction;
-
-/**
- * Aggregates vectors by computing the centroid, i.e. summing up all vectors and dividing the
- * resulting vector by the number of vectors.
- */
-@AutoService(StandardLibraryFunction.class)
-public class Center extends AggregateFunction<FlinkVectorType, CenterAccumulator>
-    implements StandardLibraryFunction {
-
- @Override
- public CenterAccumulator createAccumulator() {
- return new CenterAccumulator();
- }
-
- @Override
- public FlinkVectorType getValue(CenterAccumulator acc) {
- if (acc.count == 0) {
- return null;
- } else {
- return convert(acc.get());
- }
- }
-
- public void accumulate(CenterAccumulator acc, FlinkVectorType vector) {
- acc.add(VEC_TO_DOUBLE.eval(vector));
- }
-
- public void retract(CenterAccumulator acc, FlinkVectorType vector) {
-    acc.subtract(VEC_TO_DOUBLE.eval(vector));
- }
-
-  public void merge(CenterAccumulator acc, Iterable<CenterAccumulator> iter) {
- for (CenterAccumulator a : iter) {
- acc.addAll(a);
- }
- }
-
- public void resetAccumulator(CenterAccumulator acc) {
- acc.count = 0;
- acc.sum = null;
- }
-}
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/CenterAccumulator.java b/sqrl-vector/src/main/java/com/datasqrl/vector/CenterAccumulator.java
deleted file mode 100644
index 4de2378..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/CenterAccumulator.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-// mutable accumulator of structured type for the aggregate function
-public class CenterAccumulator {
-
- public double[] sum = null;
- public int count = 0;
-
- public synchronized void add(double[] values) {
- if (count == 0) {
- sum = values.clone();
- count = 1;
- } else {
- // Preconditions.checkArgument(values.length == sum.length);
- for (int i = 0; i < values.length; i++) {
- sum[i] += values[i];
- }
- count++;
- }
- }
-
- public synchronized void addAll(CenterAccumulator other) {
- if (other.count == 0) {
- return;
- }
- if (this.count == 0) {
- this.sum = new double[other.sum.length];
- }
- // Preconditions.checkArgument(this.sum.length == other.sum.length);
- for (int i = 0; i < other.sum.length; i++) {
- this.sum[i] += other.sum[i];
- }
- this.count += other.count;
- }
-
- public double[] get() {
- // Preconditions.checkArgument(count > 0);
- double[] result = new double[sum.length];
- for (int i = 0; i < sum.length; i++) {
- result[i] = sum[i] / count;
- }
- return result;
- }
-
-  public synchronized void subtract(double[] values) {
- // Preconditions.checkArgument(values.length == sum.length);
- for (int i = 0; i < values.length; i++) {
- sum[i] -= values[i];
- }
- count--;
- }
-}
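
Centroid in action, as a sketch: summing [1, 2] and [3, 4] gives sum = [4, 6] with count = 2, so get() returns [2, 3].

```java
import java.util.Arrays;

public class CenterSketch {
  public static void main(String[] args) {
    CenterAccumulator acc = new CenterAccumulator(); // class from this diff
    acc.add(new double[] {1.0, 2.0}); // sum = [1, 2], count = 1
    acc.add(new double[] {3.0, 4.0}); // sum = [4, 6], count = 2
    System.out.println(Arrays.toString(acc.get())); // [2.0, 3.0]
  }
}
```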
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/CosineDistance.java b/sqrl-vector/src/main/java/com/datasqrl/vector/CosineDistance.java
deleted file mode 100644
index 078f329..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/CosineDistance.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-
-/** Computes the cosine distance between two vectors */
-@AutoService(StandardLibraryFunction.class)
-public class CosineDistance extends CosineSimilarity {
-
- public double eval(FlinkVectorType vectorA, FlinkVectorType vectorB) {
- return 1 - super.eval(vectorA, vectorB);
- }
-}
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/CosineSimilarity.java b/sqrl-vector/src/main/java/com/datasqrl/vector/CosineSimilarity.java
deleted file mode 100644
index a4225ea..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/CosineSimilarity.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import static com.datasqrl.vector.VectorFunctions.VEC_TO_DOUBLE;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.commons.math3.linear.ArrayRealVector;
-import org.apache.commons.math3.linear.RealVector;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/** Computes the cosine similarity between two vectors */
-@AutoService(StandardLibraryFunction.class)
-public class CosineSimilarity extends ScalarFunction implements StandardLibraryFunction {
-
- public double eval(FlinkVectorType vectorA, FlinkVectorType vectorB) {
- // Create RealVectors from the input arrays
- RealVector vA = new ArrayRealVector(VEC_TO_DOUBLE.eval(vectorA), false);
- RealVector vB = new ArrayRealVector(VEC_TO_DOUBLE.eval(vectorB), false);
-
- // Calculate the cosine similarity
- double dotProduct = vA.dotProduct(vB);
- double normalization = vA.getNorm() * vB.getNorm();
-
- return dotProduct / normalization;
- }
-}
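
The formula is cos(a, b) = a·b / (‖a‖ ‖b‖); CosineDistance above is simply 1 minus this value. A standalone sketch with commons-math:

```java
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class CosineSketch {
  public static void main(String[] args) {
    RealVector a = new ArrayRealVector(new double[] {1.0, 0.0});
    RealVector b = new ArrayRealVector(new double[] {0.0, 1.0});
    double similarity = a.dotProduct(b) / (a.getNorm() * b.getNorm());
    System.out.println(similarity);       // 0.0 (orthogonal vectors)
    System.out.println(1.0 - similarity); // 1.0 = cosine distance
  }
}
```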
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/DoubleToVector.java b/sqrl-vector/src/main/java/com/datasqrl/vector/DoubleToVector.java
deleted file mode 100644
index 656756e..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/DoubleToVector.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import com.datasqrl.function.SqrlCastFunction;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/** Converts a double array to a vector */
-@AutoService(StandardLibraryFunction.class)
-public class DoubleToVector extends ScalarFunction
- implements StandardLibraryFunction, SqrlCastFunction {
-
- public FlinkVectorType eval(double[] array) {
- return new FlinkVectorType(array);
- }
-}
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/EuclideanDistance.java b/sqrl-vector/src/main/java/com/datasqrl/vector/EuclideanDistance.java
deleted file mode 100644
index 07456a5..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/EuclideanDistance.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import static com.datasqrl.vector.VectorFunctions.VEC_TO_DOUBLE;
-
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.commons.math3.linear.ArrayRealVector;
-import org.apache.commons.math3.linear.RealVector;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/** Computes the Euclidean distance between two vectors */
-@AutoService(StandardLibraryFunction.class)
-public class EuclideanDistance extends ScalarFunction implements StandardLibraryFunction {
-
- public double eval(FlinkVectorType vectorA, FlinkVectorType vectorB) {
- // Create RealVectors from the input arrays
- RealVector vA = new ArrayRealVector(VEC_TO_DOUBLE.eval(vectorA), false);
- RealVector vB = new ArrayRealVector(VEC_TO_DOUBLE.eval(vectorB), false);
- return vA.getDistance(vB);
- }
-}
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/PostgresVectorTypeSerializer.java b/sqrl-vector/src/main/java/com/datasqrl/vector/PostgresVectorTypeSerializer.java
deleted file mode 100644
index b8749bf..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/PostgresVectorTypeSerializer.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import com.datasqrl.type.JdbcTypeSerializer;
-import java.util.Arrays;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcDeserializationConverter;
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter.JdbcSerializationConverter;
-import org.apache.flink.table.data.RawValueData;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.postgresql.util.PGobject;
-
-public class PostgresVectorTypeSerializer
-    implements JdbcTypeSerializer<JdbcDeserializationConverter, JdbcSerializationConverter> {
-
- @Override
- public String getDialectId() {
- return "postgres";
- }
-
- @Override
- public Class getConversionClass() {
- return FlinkVectorType.class;
- }
-
- @Override
- public String dialectTypeName() {
- return "vector";
- }
-
- @Override
-  public GenericDeserializationConverter<JdbcDeserializationConverter> getDeserializerConverter() {
- return () ->
- (val) -> {
- FlinkVectorType t = (FlinkVectorType) val;
- return t.getValue();
- };
- }
-
- @Override
-  public GenericSerializationConverter<JdbcSerializationConverter> getSerializerConverter(
- LogicalType type) {
- FlinkVectorTypeSerializer flinkVectorTypeSerializer = new FlinkVectorTypeSerializer();
- return () ->
- (val, index, statement) -> {
- if (val != null && !val.isNullAt(index)) {
-          RawValueData<FlinkVectorType> object = val.getRawValue(index);
- FlinkVectorType vec = object.toObject(flinkVectorTypeSerializer);
-
- if (vec != null) {
- PGobject pgObject = new PGobject();
- pgObject.setType("vector");
- pgObject.setValue(Arrays.toString(vec.getValue()));
- statement.setObject(index, pgObject);
- return;
- }
- }
- statement.setObject(index, null);
- };
- }
-}
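
The serializer relies on Arrays.toString producing "[1.0, 2.0, 3.0]", which matches pgvector's text input format. A sketch of just the PGobject construction:

```java
import java.util.Arrays;
import org.postgresql.util.PGobject;

public class PgVectorSketch {
  public static void main(String[] args) throws Exception {
    PGobject pgObject = new PGobject();
    pgObject.setType("vector"); // pgvector column type
    pgObject.setValue(Arrays.toString(new double[] {1.0, 2.0, 3.0}));
    System.out.println(pgObject.getValue()); // [1.0, 2.0, 3.0]
    // statement.setObject(index, pgObject) then binds it to a vector column
  }
}
```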
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/README.md b/sqrl-vector/src/main/java/com/datasqrl/vector/README.md
deleted file mode 100644
index 6d16b4d..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-| Function | Description | Example |
-|----------|-------------|---------|
-| `AsciiTextTestEmbed(string) → vector` | Converts text to a vector of length 256 that counts each character by ASCII value (testing helper). | `AsciiTextTestEmbed('hello') → [0, 0, 0, ..., 1, 0, 1, 2, ...]` |
-| `Center(vector) → vector` | Aggregate function that computes the center (centroid) of multiple vectors. | `Center([1.0, 2.0], [3.0, 4.0]) → [2.0, 3.0]` |
-| `CosineDistance(vector, vector) → double` | Computes the cosine distance between two vectors. | `CosineDistance([1.0, 0.0], [0.0, 1.0]) → 1.0` |
-| `CosineSimilarity(vector, vector) → double` | Computes the cosine similarity between two vectors. | `CosineSimilarity([1.0, 0.0], [0.0, 1.0]) → 0.0` |
-| `DoubleToVector(array) → vector` | Converts an array of doubles to a vector. | `DoubleToVector([1.0, 2.0, 3.0]) → [1.0, 2.0, 3.0]` |
-| `EuclideanDistance(vector, vector) → double` | Computes the Euclidean distance between two vectors. | `EuclideanDistance([1.0, 0.0], [0.0, 1.0]) → 1.41421356237` |
-| `OnnxEmbed(string, string) → vector` | Converts text to a vector using an ONNX model. | `OnnxEmbed('hello', '/path/to/model') → [0.5, 0.1, ...]` |
-| `VectorToDouble(vector) → array` | Converts a vector to an array of doubles. | `VectorToDouble([1.0, 2.0, 3.0]) → [1.0, 2.0, 3.0]` |
\ No newline at end of file
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/VectorFunctions.java b/sqrl-vector/src/main/java/com/datasqrl/vector/VectorFunctions.java
deleted file mode 100644
index 1042bc4..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/VectorFunctions.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import java.util.Set;
-import org.apache.flink.table.functions.FunctionDefinition;
-
-public class VectorFunctions {
-
- public static final CosineSimilarity COSINE_SIMILARITY = new CosineSimilarity();
- public static final CosineDistance COSINE_DISTANCE = new CosineDistance();
-
- public static final EuclideanDistance EUCLIDEAN_DISTANCE = new EuclideanDistance();
-
- public static final VectorToDouble VEC_TO_DOUBLE = new VectorToDouble();
-
- public static final DoubleToVector DOUBLE_TO_VECTOR = new DoubleToVector();
-
- public static final AsciiTextTestEmbed ASCII_TEXT_TEST_EMBED = new AsciiTextTestEmbed();
-
- public static final Center CENTER = new Center();
-
-  public static final Set<FunctionDefinition> functions =
- Set.of(
- COSINE_SIMILARITY,
- COSINE_DISTANCE,
- EUCLIDEAN_DISTANCE,
- VEC_TO_DOUBLE,
- DOUBLE_TO_VECTOR,
- ASCII_TEXT_TEST_EMBED,
- CENTER);
-
- public static FlinkVectorType convert(double[] vector) {
- return new FlinkVectorType(vector);
- }
-}
diff --git a/sqrl-vector/src/main/java/com/datasqrl/vector/VectorToDouble.java b/sqrl-vector/src/main/java/com/datasqrl/vector/VectorToDouble.java
deleted file mode 100644
index 59bb84d..0000000
--- a/sqrl-vector/src/main/java/com/datasqrl/vector/VectorToDouble.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright © 2024 DataSQRL (contact@datasqrl.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datasqrl.vector;
-
-import com.datasqrl.function.SqrlCastFunction;
-import com.datasqrl.function.StandardLibraryFunction;
-import com.google.auto.service.AutoService;
-import org.apache.flink.table.functions.ScalarFunction;
-
-/** Converts a vector to a double array */
-@AutoService(StandardLibraryFunction.class)
-public class VectorToDouble extends ScalarFunction
- implements StandardLibraryFunction, SqrlCastFunction {
-
- public double[] eval(FlinkVectorType vectorType) {
- return vectorType.getValue();
- }
-}
diff --git a/sqrl-vector/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer b/sqrl-vector/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
deleted file mode 100644
index 30ad3a4..0000000
--- a/sqrl-vector/src/main/resources/META-INF/services/com.datasqrl.type.JdbcTypeSerializer
+++ /dev/null
@@ -1 +0,0 @@
-com.datasqrl.vector.PostgresVectorTypeSerializer
\ No newline at end of file
diff --git a/flink-jar-runner/src/main/java/com/datasqrl/EnvironmentVariablesUtils.java b/src/main/java/com/datasqrl/EnvironmentVariablesUtils.java
similarity index 100%
rename from flink-jar-runner/src/main/java/com/datasqrl/EnvironmentVariablesUtils.java
rename to src/main/java/com/datasqrl/EnvironmentVariablesUtils.java
diff --git a/flink-jar-runner/src/main/java/com/datasqrl/FlinkMain.java b/src/main/java/com/datasqrl/FlinkMain.java
similarity index 100%
rename from flink-jar-runner/src/main/java/com/datasqrl/FlinkMain.java
rename to src/main/java/com/datasqrl/FlinkMain.java
diff --git a/flink-jar-runner/src/main/java/com/datasqrl/JsonEnvVarDeserializer.java b/src/main/java/com/datasqrl/JsonEnvVarDeserializer.java
similarity index 100%
rename from flink-jar-runner/src/main/java/com/datasqrl/JsonEnvVarDeserializer.java
rename to src/main/java/com/datasqrl/JsonEnvVarDeserializer.java
diff --git a/flink-jar-runner/src/main/java/com/datasqrl/SqlExecutor.java b/src/main/java/com/datasqrl/SqlExecutor.java
similarity index 94%
rename from flink-jar-runner/src/main/java/com/datasqrl/SqlExecutor.java
rename to src/main/java/com/datasqrl/SqlExecutor.java
index ebc9f3d..90fbaec 100644
--- a/flink-jar-runner/src/main/java/com/datasqrl/SqlExecutor.java
+++ b/src/main/java/com/datasqrl/SqlExecutor.java
@@ -15,7 +15,7 @@
*/
package com.datasqrl;
-import com.datasqrl.function.StandardLibraryFunction;
+import com.datasqrl.function.AutoRegisterSystemFunction;
import java.io.File;
import java.util.List;
import java.util.Map;
@@ -58,8 +58,8 @@ public SqlExecutor(Configuration configuration, String udfPath) {
public void setupSystemFunctions() {
-    ServiceLoader<StandardLibraryFunction> standardLibraryFunctions =
-        ServiceLoader.load(StandardLibraryFunction.class);
+    ServiceLoader<AutoRegisterSystemFunction> standardLibraryFunctions =
+        ServiceLoader.load(AutoRegisterSystemFunction.class);
standardLibraryFunctions.forEach(
function -> {
@@ -74,9 +74,6 @@ public void setupSystemFunctions() {
}
static String getFunctionNameFromClass(Class clazz) {
- // String fctName = clazz.getSimpleName();
- // fctName = Character.toLowerCase(fctName.charAt(0)) + fctName.substring(1);
- // return fctName;
return clazz.getSimpleName().toLowerCase();
}
diff --git a/flink-jar-runner/src/main/java/com/datasqrl/SqlUtils.java b/src/main/java/com/datasqrl/SqlUtils.java
similarity index 100%
rename from flink-jar-runner/src/main/java/com/datasqrl/SqlUtils.java
rename to src/main/java/com/datasqrl/SqlUtils.java
diff --git a/sqrl-lib-common/src/main/java/com/datasqrl/function/StandardLibraryFunction.java b/src/main/java/com/datasqrl/function/AutoRegisterSystemFunction.java
similarity index 83%
rename from sqrl-lib-common/src/main/java/com/datasqrl/function/StandardLibraryFunction.java
rename to src/main/java/com/datasqrl/function/AutoRegisterSystemFunction.java
index e74fdc0..cbe1e55 100644
--- a/sqrl-lib-common/src/main/java/com/datasqrl/function/StandardLibraryFunction.java
+++ b/src/main/java/com/datasqrl/function/AutoRegisterSystemFunction.java
@@ -15,5 +15,4 @@
*/
package com.datasqrl.function;
-/** Marker interface for functions that DataSQRL uses to track internal library functions */
-public interface StandardLibraryFunction {}
+public interface AutoRegisterSystemFunction {}
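
The rename keeps the registration mechanics intact: @AutoService generates the META-INF/services entry that the ServiceLoader call in SqlExecutor reads at runtime. A sketch with a hypothetical function class:

```java
import com.datasqrl.function.AutoRegisterSystemFunction;
import com.google.auto.service.AutoService;
import java.util.ServiceLoader;

// Hypothetical example; @AutoService writes
// META-INF/services/com.datasqrl.function.AutoRegisterSystemFunction at compile time.
@AutoService(AutoRegisterSystemFunction.class)
class MyFunction implements AutoRegisterSystemFunction {}

public class RegistrationSketch {
  public static void main(String[] args) {
    ServiceLoader.load(AutoRegisterSystemFunction.class)
        .forEach(f -> System.out.println(f.getClass().getSimpleName().toLowerCase()));
    // prints "myfunction" -- the same naming rule as getFunctionNameFromClass above
  }
}
```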
diff --git a/flink-jar-runner/src/main/resources/log4j2.properties b/src/main/resources/log4j2.properties
similarity index 100%
rename from flink-jar-runner/src/main/resources/log4j2.properties
rename to src/main/resources/log4j2.properties
diff --git a/flink-jar-runner/src/main/resources/log4j2.xml b/src/main/resources/log4j2.xml
similarity index 100%
rename from flink-jar-runner/src/main/resources/log4j2.xml
rename to src/main/resources/log4j2.xml
diff --git a/flink-jar-runner/src/test/docker/Dockerfile b/src/test/docker/Dockerfile
similarity index 100%
rename from flink-jar-runner/src/test/docker/Dockerfile
rename to src/test/docker/Dockerfile
diff --git a/flink-jar-runner/src/test/docker/docker-compose.yml b/src/test/docker/docker-compose.yml
similarity index 100%
rename from flink-jar-runner/src/test/docker/docker-compose.yml
rename to src/test/docker/docker-compose.yml
diff --git a/flink-jar-runner/src/test/java/com/datasqrl/AbstractITSupport.java b/src/test/java/com/datasqrl/AbstractITSupport.java
similarity index 100%
rename from flink-jar-runner/src/test/java/com/datasqrl/AbstractITSupport.java
rename to src/test/java/com/datasqrl/AbstractITSupport.java
diff --git a/flink-jar-runner/src/test/java/com/datasqrl/CommandLineUtil.java b/src/test/java/com/datasqrl/CommandLineUtil.java
similarity index 100%
rename from flink-jar-runner/src/test/java/com/datasqrl/CommandLineUtil.java
rename to src/test/java/com/datasqrl/CommandLineUtil.java
diff --git a/flink-jar-runner/src/test/java/com/datasqrl/EnvironmentVariablesUtilsTest.java b/src/test/java/com/datasqrl/EnvironmentVariablesUtilsTest.java
similarity index 100%
rename from flink-jar-runner/src/test/java/com/datasqrl/EnvironmentVariablesUtilsTest.java
rename to src/test/java/com/datasqrl/EnvironmentVariablesUtilsTest.java
diff --git a/flink-jar-runner/src/test/java/com/datasqrl/FlinkMainIT.java b/src/test/java/com/datasqrl/FlinkMainIT.java
similarity index 100%
rename from flink-jar-runner/src/test/java/com/datasqrl/FlinkMainIT.java
rename to src/test/java/com/datasqrl/FlinkMainIT.java
diff --git a/flink-jar-runner/src/test/java/com/datasqrl/SqlRunnerTest.java b/src/test/java/com/datasqrl/SqlRunnerTest.java
similarity index 100%
rename from flink-jar-runner/src/test/java/com/datasqrl/SqlRunnerTest.java
rename to src/test/java/com/datasqrl/SqlRunnerTest.java
diff --git a/flink-jar-runner/src/test/java/com/datasqrl/SqlUtilsTest.java b/src/test/java/com/datasqrl/SqlUtilsTest.java
similarity index 95%
rename from flink-jar-runner/src/test/java/com/datasqrl/SqlUtilsTest.java
rename to src/test/java/com/datasqrl/SqlUtilsTest.java
index 2f27a62..35e145a 100644
--- a/flink-jar-runner/src/test/java/com/datasqrl/SqlUtilsTest.java
+++ b/src/test/java/com/datasqrl/SqlUtilsTest.java
@@ -27,7 +27,7 @@
class SqlUtilsTest {
@ParameterizedTest(name = "{0}")
- @CsvSource({"flink.sql,32", "test_sql.sql,6", "test_udf_sql.sql,6"})
+ @CsvSource({"flink.sql,18", "test_sql.sql,6", "test_udf_sql.sql,6"})
void givenSource_when_thenSplitCorrectly(String filename, int numberOfStatements)
throws IOException, Exception {
var script = Resources.toString(getClass().getResource("/sql/" + filename), Charsets.UTF_8);
diff --git a/flink-jar-runner/src/test/resources/config/config.yaml b/src/test/resources/config/config.yaml
similarity index 100%
rename from flink-jar-runner/src/test/resources/config/config.yaml
rename to src/test/resources/config/config.yaml
diff --git a/flink-jar-runner/src/test/resources/config/flink-conf.yaml b/src/test/resources/config/flink-conf.yaml
similarity index 100%
rename from flink-jar-runner/src/test/resources/config/flink-conf.yaml
rename to src/test/resources/config/flink-conf.yaml
diff --git a/flink-jar-runner/src/test/resources/datasources/cardAssignment.jsonl b/src/test/resources/datasources/cardAssignment.jsonl
similarity index 100%
rename from flink-jar-runner/src/test/resources/datasources/cardAssignment.jsonl
rename to src/test/resources/datasources/cardAssignment.jsonl
diff --git a/flink-jar-runner/src/test/resources/datasources/merchant.jsonl b/src/test/resources/datasources/merchant.jsonl
similarity index 100%
rename from flink-jar-runner/src/test/resources/datasources/merchant.jsonl
rename to src/test/resources/datasources/merchant.jsonl
diff --git a/flink-jar-runner/src/test/resources/datasources/merchantReward.jsonl b/src/test/resources/datasources/merchantReward.jsonl
similarity index 100%
rename from flink-jar-runner/src/test/resources/datasources/merchantReward.jsonl
rename to src/test/resources/datasources/merchantReward.jsonl
diff --git a/flink-jar-runner/src/test/resources/datasources/transaction.jsonl b/src/test/resources/datasources/transaction.jsonl
similarity index 100%
rename from flink-jar-runner/src/test/resources/datasources/transaction.jsonl
rename to src/test/resources/datasources/transaction.jsonl
diff --git a/flink-jar-runner/src/test/resources/log4j2.properties b/src/test/resources/log4j2.properties
similarity index 100%
rename from flink-jar-runner/src/test/resources/log4j2.properties
rename to src/test/resources/log4j2.properties
diff --git a/flink-jar-runner/src/test/resources/log4j2.xml b/src/test/resources/log4j2.xml
similarity index 100%
rename from flink-jar-runner/src/test/resources/log4j2.xml
rename to src/test/resources/log4j2.xml
diff --git a/flink-jar-runner/src/test/resources/plans/compiled-plan-udf.json b/src/test/resources/plans/compiled-plan-udf.json
similarity index 100%
rename from flink-jar-runner/src/test/resources/plans/compiled-plan-udf.json
rename to src/test/resources/plans/compiled-plan-udf.json
diff --git a/flink-jar-runner/src/test/resources/plans/compiled-plan.json b/src/test/resources/plans/compiled-plan.json
similarity index 100%
rename from flink-jar-runner/src/test/resources/plans/compiled-plan.json
rename to src/test/resources/plans/compiled-plan.json
diff --git a/flink-jar-runner/src/test/resources/plans/test_plan.json b/src/test/resources/plans/test_plan.json
similarity index 100%
rename from flink-jar-runner/src/test/resources/plans/test_plan.json
rename to src/test/resources/plans/test_plan.json
diff --git a/flink-jar-runner/src/test/resources/sql/flink.sql b/src/test/resources/sql/flink.sql
similarity index 60%
rename from flink-jar-runner/src/test/resources/sql/flink.sql
rename to src/test/resources/sql/flink.sql
index a2f4178..9040222 100644
--- a/flink-jar-runner/src/test/resources/sql/flink.sql
+++ b/src/test/resources/sql/flink.sql
@@ -1,72 +1,60 @@
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `timestamptostring` AS 'com.datasqrl.time.TimestampToString' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofmonth` AS 'com.datasqrl.time.EndOfMonth' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `timestamptoepochmilli` AS 'com.datasqrl.time.TimestampToEpochMilli' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofweek` AS 'com.datasqrl.time.EndOfWeek' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `parsetimestamp` AS 'com.datasqrl.time.ParseTimestamp' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `epochmillitotimestamp` AS 'com.datasqrl.time.EpochMilliToTimestamp' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofminute` AS 'com.datasqrl.time.EndOfMinute' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `timestamptoepoch` AS 'com.datasqrl.time.TimestampToEpoch' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofsecond` AS 'com.datasqrl.time.EndOfSecond' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `epochtotimestamp` AS 'com.datasqrl.time.EpochToTimestamp' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `atzone` AS 'com.datasqrl.time.AtZone' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofday` AS 'com.datasqrl.time.EndOfDay' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofhour` AS 'com.datasqrl.time.EndOfHour' LANGUAGE JAVA;
-
-CREATE TEMPORARY FUNCTION IF NOT EXISTS `endofyear` AS 'com.datasqrl.time.EndOfYear' LANGUAGE JAVA;
+-- Updated Flink SQL using the datagen connector, with full pipeline and view updates
CREATE TEMPORARY TABLE `transaction_1` (
`transactionId` BIGINT NOT NULL,
`cardNo` DOUBLE NOT NULL,
- `time` TIMESTAMP(3) WITH LOCAL TIME ZONE NOT NULL,
+ `time` TIMESTAMP_LTZ(3) NOT NULL,
`amount` DOUBLE NOT NULL,
`merchantId` BIGINT NOT NULL,
PRIMARY KEY (`transactionId`, `time`) NOT ENFORCED,
- WATERMARK FOR `time` AS `time` - INTERVAL '1.0' SECOND
+ WATERMARK FOR `time` AS `time` - INTERVAL '1' SECOND
) WITH (
- 'format' = 'flexible-json',
- 'path' = 'file:///datasources/transaction.jsonl',
- 'source.monitor-interval' = '1 min',
- 'connector' = 'filesystem'
+ 'connector' = 'datagen',
+ 'rows-per-second' = '10',
+ 'fields.transactionId.kind' = 'sequence',
+ 'fields.transactionId.start' = '1',
+ 'fields.transactionId.end' = '1000000',
+ 'fields.cardNo.min' = '1000',
+ 'fields.cardNo.max' = '9999',
+ 'fields.amount.min' = '1',
+ 'fields.amount.max' = '5000',
+ 'fields.merchantId.min' = '1',
+ 'fields.merchantId.max' = '100'
);
CREATE TEMPORARY TABLE `cardassignment_1` (
`customerId` BIGINT NOT NULL,
`cardNo` DOUBLE NOT NULL,
- `timestamp` TIMESTAMP(3) WITH LOCAL TIME ZONE NOT NULL,
- `cardType` VARCHAR(2147483647) CHARACTER SET `UTF-16LE` NOT NULL,
+ `timestamp` TIMESTAMP_LTZ(3) NOT NULL,
+ `cardType` VARCHAR(100) NOT NULL,
PRIMARY KEY (`customerId`, `cardNo`, `timestamp`) NOT ENFORCED,
- WATERMARK FOR `timestamp` AS `timestamp` - INTERVAL '1.0' SECOND
+ WATERMARK FOR `timestamp` AS `timestamp` - INTERVAL '1' SECOND
) WITH (
- 'format' = 'flexible-json',
- 'path' = 'file:///datasources/cardAssignment.jsonl',
- 'source.monitor-interval' = '1 min',
- 'connector' = 'filesystem'
+ 'connector' = 'datagen',
+ 'rows-per-second' = '5',
+ 'fields.customerId.kind' = 'sequence',
+ 'fields.customerId.start' = '1',
+ 'fields.customerId.end' = '10000',
+ 'fields.cardNo.min' = '1000',
+ 'fields.cardNo.max' = '9999',
+ 'fields.cardType.length' = '10'
);
CREATE TEMPORARY TABLE `merchant_1` (
`merchantId` BIGINT NOT NULL,
- `name` VARCHAR(2147483647) CHARACTER SET `UTF-16LE` NOT NULL,
- `category` VARCHAR(2147483647) CHARACTER SET `UTF-16LE` NOT NULL,
- `updatedTime` TIMESTAMP(3) WITH LOCAL TIME ZONE NOT NULL,
+ `name` VARCHAR(100) NOT NULL,
+ `category` VARCHAR(50) NOT NULL,
+ `updatedTime` TIMESTAMP_LTZ(3) NOT NULL,
PRIMARY KEY (`merchantId`, `updatedTime`) NOT ENFORCED,
- WATERMARK FOR `updatedTime` AS `updatedTime` - INTERVAL '1.0' SECOND
+ WATERMARK FOR `updatedTime` AS `updatedTime` - INTERVAL '1' SECOND
) WITH (
- 'format' = 'flexible-json',
- 'path' = 'file:///datasources/merchant.jsonl',
- 'source.monitor-interval' = '1 min',
- 'connector' = 'filesystem'
+ 'connector' = 'datagen',
+ 'rows-per-second' = '2',
+ 'fields.merchantId.kind' = 'sequence',
+ 'fields.merchantId.start' = '1',
+ 'fields.merchantId.end' = '100',
+ 'fields.name.length' = '20',
+ 'fields.category.length' = '10'
);
CREATE TEMPORARY TABLE `_spendingbyday_1` (
@@ -76,7 +64,7 @@ CREATE TEMPORARY TABLE `_spendingbyday_1` (
PRIMARY KEY (`customerid`, `timeDay`) NOT ENFORCED
) WITH (
'password' = '${JDBC_PASSWORD}',
- 'connector' = 'jdbc-sqrl',
+ 'connector' = 'jdbc',
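+  -- stock Flink JDBC connector, replacing the custom 'jdbc-sqrl' connector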
'driver' = 'org.postgresql.Driver',
'table-name' = '_spendingbyday_1',
'url' = '${JDBC_URL}',
@@ -94,7 +82,7 @@ CREATE TEMPORARY TABLE `customertransaction_1` (
PRIMARY KEY (`transactionId`, `time`) NOT ENFORCED
) WITH (
'password' = '${JDBC_PASSWORD}',
- 'connector' = 'jdbc-sqrl',
+ 'connector' = 'jdbc',
'driver' = 'org.postgresql.Driver',
'table-name' = 'customertransaction_1',
'url' = '${JDBC_URL}',
@@ -109,7 +97,7 @@ CREATE TEMPORARY TABLE `spendingbycategory_1` (
PRIMARY KEY (`customerid`, `timeWeek`, `category`) NOT ENFORCED
) WITH (
'password' = '${JDBC_PASSWORD}',
- 'connector' = 'jdbc-sqrl',
+ 'connector' = 'jdbc',
'driver' = 'org.postgresql.Driver',
'table-name' = 'spendingbycategory_1',
'url' = '${JDBC_URL}',
@@ -132,7 +120,7 @@ WHERE `_rownum` = 1;
CREATE VIEW `table$3`
AS
-SELECT `$cor0`.`customerId` AS `customerid`, ENDOFDAY(`$cor0`.`time`) AS `timeDay`, `$cor0`.`amount`, `$cor0`.`transactionId`, `$cor0`.`time`
+SELECT `$cor0`.`customerId` AS `customerid`, CAST(FLOOR(`$cor0`.`time` TO DAY) + INTERVAL '1' DAY AS TIMESTAMP_LTZ(3)) AS `timeDay`, `$cor0`.`amount`, `$cor0`.`transactionId`, `$cor0`.`time`
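+-- FLOOR(`time` TO DAY) + INTERVAL '1' DAY buckets each event at the next midnight, replacing the ENDOFDAY UDF with built-ins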
FROM (SELECT *
FROM `transaction_1` AS `$cor1`
INNER JOIN `table$2` FOR SYSTEM_TIME AS OF `$cor1`.`time` AS `t2` ON `$cor1`.`cardNo` = `t2`.`cardNo`) AS `$cor0`
@@ -182,11 +170,21 @@ WHERE `_rownum` = 1;
CREATE VIEW `table$10`
AS
-SELECT `$cor6`.`customerId` AS `customerid`, ENDOFWEEK(`$cor6`.`time`) AS `timeWeek`, `t0`.`category`, `$cor6`.`amount`, `$cor6`.`transactionId`, `$cor6`.`time`
-FROM (SELECT *
+SELECT
+ `$cor6`.`customerId` AS `customerid`,
+ CAST(FLOOR(`$cor6`.`time` TO DAY) + INTERVAL '1' DAY * (7 - EXTRACT(DOW FROM `$cor6`.`time`)) AS TIMESTAMP_LTZ(3)) AS `timeWeek`,
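+  -- intended to replicate the ENDOFWEEK UDF: start of day, advanced (7 - day-of-week) days (DOW: 1 = Sunday .. 7 = Saturday)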
+ `t0`.`category`,
+ `$cor6`.`amount`,
+ `$cor6`.`transactionId`,
+ `$cor6`.`time`
+FROM (
+ SELECT *
FROM `transaction_1` AS `$cor7`
- INNER JOIN `table$9` FOR SYSTEM_TIME AS OF `$cor7`.`time` AS `t2` ON `$cor7`.`cardNo` = `t2`.`cardNo`) AS `$cor6`
- INNER JOIN `table$8` FOR SYSTEM_TIME AS OF `$cor6`.`time` AS `t0` ON `$cor6`.`merchantId` = `t0`.`merchantId`;
+ INNER JOIN `table$9` FOR SYSTEM_TIME AS OF `$cor7`.`time` AS `t2`
+ ON `$cor7`.`cardNo` = `t2`.`cardNo`
+) AS `$cor6`
+INNER JOIN `table$8` FOR SYSTEM_TIME AS OF `$cor6`.`time` AS `t0`
+ ON `$cor6`.`merchantId` = `t0`.`merchantId`;
CREATE VIEW `table$11`
AS
@@ -200,12 +198,11 @@ INSERT INTO `_spendingbyday_1`
FROM `table$4`)
;
INSERT INTO `customertransaction_1`
- (SELECT *
+(SELECT *
FROM `table$7`)
- ;
- INSERT INTO `spendingbycategory_1`
- (SELECT *
+;
+INSERT INTO `spendingbycategory_1`
+(SELECT *
FROM `table$11`)
- ;
- END;
-
+;
+END;
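+-- The INSERT statements above sit in an EXECUTE STATEMENT SET ... END; block, so they deploy together as a single Flink job.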
diff --git a/flink-jar-runner/src/test/resources/sql/test_sql.sql b/src/test/resources/sql/test_sql.sql
similarity index 100%
rename from flink-jar-runner/src/test/resources/sql/test_sql.sql
rename to src/test/resources/sql/test_sql.sql
diff --git a/flink-jar-runner/src/test/resources/sql/test_udf_sql.sql b/src/test/resources/sql/test_udf_sql.sql
similarity index 100%
rename from flink-jar-runner/src/test/resources/sql/test_udf_sql.sql
rename to src/test/resources/sql/test_udf_sql.sql
diff --git a/flink-jar-runner/src/test/resources/sqrl/compiled-plan.json b/src/test/resources/sqrl/compiled-plan.json
similarity index 100%
rename from flink-jar-runner/src/test/resources/sqrl/compiled-plan.json
rename to src/test/resources/sqrl/compiled-plan.json
diff --git a/flink-jar-runner/src/test/resources/sqrl/creditcard-local/transaction.schema.yml b/src/test/resources/sqrl/creditcard-local/transaction.schema.yml
similarity index 100%
rename from flink-jar-runner/src/test/resources/sqrl/creditcard-local/transaction.schema.yml
rename to src/test/resources/sqrl/creditcard-local/transaction.schema.yml
diff --git a/flink-jar-runner/src/test/resources/sqrl/creditcard-local/transaction.table.json b/src/test/resources/sqrl/creditcard-local/transaction.table.json
similarity index 100%
rename from flink-jar-runner/src/test/resources/sqrl/creditcard-local/transaction.table.json
rename to src/test/resources/sqrl/creditcard-local/transaction.table.json
diff --git a/flink-jar-runner/src/test/resources/sqrl/package.json b/src/test/resources/sqrl/package.json
similarity index 100%
rename from flink-jar-runner/src/test/resources/sqrl/package.json
rename to src/test/resources/sqrl/package.json
diff --git a/flink-jar-runner/src/test/resources/sqrl/postgres-schema.sql b/src/test/resources/sqrl/postgres-schema.sql
similarity index 100%
rename from flink-jar-runner/src/test/resources/sqrl/postgres-schema.sql
rename to src/test/resources/sqrl/postgres-schema.sql
diff --git a/flink-jar-runner/src/test/resources/sqrl/script.sqrl b/src/test/resources/sqrl/script.sqrl
similarity index 100%
rename from flink-jar-runner/src/test/resources/sqrl/script.sqrl
rename to src/test/resources/sqrl/script.sqrl
diff --git a/flink-jar-runner/src/test/resources/udfs/myjavafunction-0.1.0-SNAPSHOT-all.jar b/src/test/resources/udfs/myjavafunction-0.1.0-SNAPSHOT-all.jar
similarity index 100%
rename from flink-jar-runner/src/test/resources/udfs/myjavafunction-0.1.0-SNAPSHOT-all.jar
rename to src/test/resources/udfs/myjavafunction-0.1.0-SNAPSHOT-all.jar