1717
1818package com .datasqrl ;
1919
20+ import java .io .File ;
21+ import java .util .Map ;
22+ import java .util .TreeMap ;
23+ import java .util .concurrent .Callable ;
2024import lombok .SneakyThrows ;
2125import lombok .extern .slf4j .Slf4j ;
2226import org .apache .flink .configuration .Configuration ;
2327import org .apache .flink .configuration .GlobalConfiguration ;
2428import org .apache .flink .shaded .jackson2 .com .fasterxml .jackson .databind .ObjectMapper ;
2529import org .apache .flink .shaded .jackson2 .com .fasterxml .jackson .databind .module .SimpleModule ;
26- import org .apache .flink .table .api .CompiledPlan ;
2730import org .apache .flink .table .api .TableResult ;
28- import org .apache .flink .table .api .internal .TableEnvironmentImpl ;
29- import org .apache .flink .table .operations .StatementSetOperation ;
3031import org .apache .flink .util .FileUtils ;
3132import picocli .CommandLine ;
32- import picocli .CommandLine .* ;
33- import java . io . File ;
34- import java . util .*;
35- import java . util . concurrent . Callable ;
36-
37- /**
38- * Main class for executing SQL scripts using picocli.
39- */
40- @ Command ( name = "SqlRunner" , mixinStandardHelpOptions = true , version = "1.0" , description = "Runs SQL scripts using Flink TableEnvironment." )
33+ import picocli .CommandLine .Command ;
34+ import picocli . CommandLine . Option ;
35+
36+ /** Main class for executing SQL scripts using picocli. */
37+ @ Command (
38+ name = "SqlRunner" ,
39+ mixinStandardHelpOptions = true ,
40+ version = "1.0" ,
41+ description = "Runs SQL scripts using Flink TableEnvironment." )
4142@ Slf4j
4243public class SqlRunner implements Callable <Integer > {
4344
44- @ Option (names = {"-s" , "--sqlfile" }, description = "SQL file to execute." )
45+ @ Option (
46+ names = {"-s" , "--sqlfile" },
47+ description = "SQL file to execute." )
4548 private File sqlFile ;
4649
47- @ Option (names = {"--block" }, description = "Wait for the flink job manager to exit." ,
48- defaultValue = "false" )
50+ @ Option (
51+ names = {"--block" },
52+ description = "Wait for the flink job manager to exit." ,
53+ defaultValue = "false" )
4954 private boolean block ;
5055
51- @ Option (names = {"--planfile" }, description = "Compiled plan JSON file." )
56+ @ Option (
57+ names = {"--planfile" },
58+ description = "Compiled plan JSON file." )
5259 private File planFile ;
5360
54- @ Option (names = {"--configfile" }, description = "Configuration YAML file." )
61+ @ Option (
62+ names = {"--configfile" },
63+ description = "Configuration YAML file." )
5564 private File configFile ;
5665
57- @ Option (names = {"--udfpath" }, description = "Path to UDFs." )
66+ @ Option (
67+ names = {"--udfpath" },
68+ description = "Path to UDFs." )
5869 private String udfPath ;
5970
6071 public static void main (String [] args ) {
61- int exitCode = new CommandLine (new SqlRunner ()).execute (args );
72+ var exitCode = new CommandLine (new SqlRunner ()).execute (args );
6273 System .exit (exitCode );
6374 }
6475
@@ -70,22 +81,27 @@ public Integer call() throws Exception {
7081 }
7182
7283 // Load configuration if configFile is provided
73- Configuration configuration = new Configuration ();
84+ var configuration = new Configuration ();
7485 if (configFile != null ) {
7586 configuration = loadConfigurationFromYaml (configFile );
7687 }
7788
89+ log .info ("Environment variables" );
90+ TreeMap <String , String > envVariables = new TreeMap <>(System .getenv ());
91+ envVariables .forEach ((name , value ) -> log .info ("{}: {}" , name , value ));
92+
7893 // Initialize SqlExecutor
79- SqlExecutor sqlExecutor = new SqlExecutor (configuration , udfPath );
94+ var sqlExecutor = new SqlExecutor (configuration , udfPath , envVariables );
8095 TableResult tableResult ;
8196 // Input validation and execution logic
8297 if (sqlFile != null ) {
8398 // Single SQL file mode
84- String script = FileUtils .readFileUtf8 (sqlFile );
99+ var script = FileUtils .readFileUtf8 (sqlFile );
100+ EnvironmentVariablesUtils .validateEnvironmentVariables (envVariables , script );
85101 tableResult = sqlExecutor .executeScript (script );
86102 } else if (planFile != null ) {
87103 // Compiled plan JSON file
88- String planJson = FileUtils .readFileUtf8 (planFile );
104+ var planJson = FileUtils .readFileUtf8 (planFile );
89105 planJson = replaceScriptWithEnv (planJson );
90106
91107 tableResult = sqlExecutor .executeCompiledPlan (planJson );
@@ -110,12 +126,11 @@ private String replaceScriptWithEnv(String script) {
110126 return objectMapper .writeValueAsString (map );
111127 }
112128
113-
114129 public static ObjectMapper getObjectMapper () {
115- ObjectMapper objectMapper = new ObjectMapper ();
130+ var objectMapper = new ObjectMapper ();
116131
117132 // Register the custom deserializer module
118- SimpleModule module = new SimpleModule ();
133+ var module = new SimpleModule ();
119134 module .addDeserializer (String .class , new JsonEnvVarDeserializer ());
120135 objectMapper .registerModule (module );
121136 return objectMapper ;
@@ -130,7 +145,7 @@ public static ObjectMapper getObjectMapper() {
130145 */
131146 private Configuration loadConfigurationFromYaml (File configFile ) throws Exception {
132147 log .info ("Loading configuration from {}" , configFile .getAbsolutePath ());
133- Configuration configuration = GlobalConfiguration .loadConfiguration (configFile .getAbsolutePath ());
148+ var configuration = GlobalConfiguration .loadConfiguration (configFile .getAbsolutePath ());
134149 return configuration ;
135150 }
136- }
151+ }