diff --git a/.classpath b/.classpath new file mode 100755 index 0000000..e5dc6a3 --- /dev/null +++ b/.classpath @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/.gitignore b/.gitignore new file mode 100755 index 0000000..3d7a79a --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +/tmp +/logs/ +/bin/ +/lib/**/ivy*.jar +.settings/org.eclipse.jdt.groovy.core.prefs +ISCCR.log +build.number diff --git a/.project b/.project new file mode 100755 index 0000000..1198278 --- /dev/null +++ b/.project @@ -0,0 +1,25 @@ + + + henning_sagdevops-ci-assets + + + + + + org.eclipse.wst.common.project.facet.core.builder + + + + + org.eclipse.jdt.core.javabuilder + + + + + + org.eclipse.jdt.groovy.core.groovyNature + org.eclipse.jdt.core.javanature + org.eclipse.wst.common.project.facet.core.nature + org.apache.ivyde.eclipse.ivynature + + diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000..0c68a61 --- /dev/null +++ b/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,7 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 +org.eclipse.jdt.core.compiler.compliance=1.8 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.8 diff --git a/.settings/org.eclipse.jdt.groovy.core.prefs b/.settings/org.eclipse.jdt.groovy.core.prefs new file mode 100644 index 0000000..65c6dc8 --- /dev/null +++ b/.settings/org.eclipse.jdt.groovy.core.prefs @@ -0,0 +1,2 @@ +eclipse.preferences.version=1 +groovy.compiler.level=-1 diff --git a/.settings/org.eclipse.wst.common.project.facet.core.xml b/.settings/org.eclipse.wst.common.project.facet.core.xml new file mode 100644 index 0000000..f4ef8aa --- /dev/null +++ b/.settings/org.eclipse.wst.common.project.facet.core.xml @@ -0,0 +1,4 @@ + + + + diff --git a/LICENSE b/LICENSE old mode 
100644 new mode 100755 diff --git a/README.md b/README.md old mode 100644 new mode 100755 index a670432..c61d8de --- a/README.md +++ b/README.md @@ -3,61 +3,66 @@ Software AG DevOps library to support assets CI (continuous integration) with we ## Description -sagdevops-ci-assets is a library that easily enables CI for your webMethods projects. You can setup your infrastructure in minutes and then deploy flowlessly on your test service while also checking the quality -by running all tests uploaded your version control. +sagdevops-ci-assets is a library that easily enables CI for your webMethods projects. You can setup your infrastructure in minutes and then deploy flowlessly on your test service while also checking the quality by running all tests uploaded your version control. + +Note: the scripts in this repository are not project specific, they are generic and can be used by multiple projects (in parallel). For this tutorial, the project specific scripts and source code assets like IS packages are stored in a separate repository ([https://github.com/SoftwareAG/webmethods-sample-project-layout](https://github.com/SoftwareAG/webmethods-sample-project-layout)) and are "referenced" by the pipeline setup described below. ## Set-up ### webMethods Installation Prepare your webMethods installation - your build server can contain only a plain IntegrationServer with Deployer. Keep the server plain - there is no need for designer or database connection. + Your test server can be more complex as CI will execute unit and integration tests against it. The build and the test server must reach each other over http so that the deployment and the testing can be performed. 
### CI Library -Download the library on your build server by +Download the sagdevops-ci-assets respository on your build server by ``` git clone https://github.com/SoftwareAG/sagdevops-ci-assets.git ``` -Edit the _System.properties_ to correspond to your inftrastucture - deployerHost is the machine where your Deployer is running(normally the build server) where targetHost is your test server - where the packages will be deployed and tested. +Edit the _System.properties_ to correspond to your infrastructure: -*Notice* change the path the Deployer if you're not using the _default_ Integration Server instance. +* __config.deployer.*__: Configuration parameters which specify your Deployer installation and Deployer server +* __config.tmpdir__: Points to a tempory directory where assets are stored for the deployment process. **Note**: take care to clean this directory up regularly! +* __config.build.buildStorageDir__: Where to store the file based repositories created by the Asset Build Environment +* __config.deployer.projectNamePrefix__: Defines the Deployer Project Name prefix. Can either be static, e.g. "BDA", or it can be dynamic, e.g. "Jenkins_${env.BUILD_NUMBE}" +* __config.libs.resolve__: + * Set to "**remote**" if dependent jars should be downloaded from remote maven repositories with Apache Ivy. See "[resources/ivy/ivy-remote.xml](resources/ivy/ivy-remote.xml)" for list of jars. See "[resources/ivy/ivysettings-remote.xml](resources/ivy/ivysettings-remote.xml)" for list of repositories from which jars are downloaded. **Note**: Software AG jars are referenced with a filesystem resolver pointing to the respective local installation. + * Set to "**local**" if no internet connection is available and place necessary jars (see "[resources/ivy/ivy-remote.xml](resources/ivy/ivy-remote.xml)" for list) into folder "**lib/ext**". 
-### Build/CI Environment -* On your build server set-up the default java on PATH to be the JDK that comes with your webMethods installation ${SAG_HOME}/jvm/jvm/bin/java +### Build/CI Environment -* On your build server install Jenkins CI Server and run it with the same user that run your webMethods processes. The CI can work also with other CI server, but for the reference implementation we've chosen Jenkins. +Setup your build server in the following way: + +* Add the default java to your PATH variable. Use the JDK that comes with your webMethods installation ${SAG_HOME}/jvm/jvm/bin/java +* Install Jenkins CI Server v2 ([https://jenkins.io/](https://jenkins.io/)) and run it with the same user that run your webMethods processes. This webMethods CI framework can work also with other CI servers, but for the reference implementation we've chosen Jenkins 2 with native Pipeline support. -* In Jenkins->Manage Jenkins->Configure System->EnvironmentVariables define the following environment variables: -1. SAG_CI_HOME = path_to_th_sagdevops-ci-assets on the local file system. -2. SAG_HOME = path_to_your_local_webmethods_installation - -Use slash "/" as path separator. Example: "C:/SoftwareAG", instead of "C:\SoftwareAG". +Setup your Jenkins server in the following way: +* In "Jenkins > Manage Jenkins > Configure System > EnvironmentVariables" define the following environment variables: + 1. __SAG_CI_HOME__: path_to_the_sagdevops-ci-assets on the local file system. I.e., if you have cloned the sagdevops-ci-assets respository to the directory "/home/user/sag/sagdevops-ci-assets", the set "SAG_CI_HOME=/home/user/sag/sagdevops-ci-assets" in Jenkins. + 2. __SAG_HOME__: path_to_your_local_webmethods_installation +Note: Use slash "/" as path separator. Example: "C:/SoftwareAG", instead of "C:\SoftwareAG". ### Jenkins Pipeline Job -In Jenkins, create a new item from type pipeline. Give it a **unique name** as we use the job name as identifier further down the process. 
Scroll down the page to the pipeline definition -and choose _Pipeline definition from SCM_. Choose git as system and give the url of the webmethods-sample-project-layout - _https://github.com/SoftwareAG/webmethods-sample-project-layout.git_ +In Jenkins, create a new item from type pipeline. Give it a **unique name** as we use the job name as identifier further down the process. Scroll down the page to the pipeline definition and choose _Pipeline definition from SCM_. Choose git as system and give the url of the webmethods-sample-project-layout [https://github.com/SoftwareAG/webmethods-sample-project-layout.git](https://github.com/SoftwareAG/webmethods-sample-project-layout.git). -This sample project contains two pre-created pipeline definitions - Jenkinsfile.win and Jenkinsfile.unix that run on the respective operating systems. Type in the correct file in respect of you -build server OS. +This sample project contains two pre-created pipeline definitions - Jenkinsfile.win and Jenkinsfile.unix that run on the respective operating systems. Type in the correct file in respect of you build server OS. Those pipeline definition are orchestrating all steps around the build, deploy and the test on your server. If the all environment variables are set correctly you should not change anything here. ## How it works -After your pipeline job is set-up, trigger it. It will download the pipeline description automatically, then checkout the sources, build the core, deploy the code and run tests. -Whenever a developer checks in new IS packages and Tests those will be automatically deployed and all new tests will be executed. For this to work, the structure defined here _https://github.com/SoftwareAG/webmethods-sample-project-layout.git_ has followed. - -## Notice -The wM Test Suite tests will have to be places in a directory a *setup* directory inside the test project, so that it can be picked up by the test executor. - - - +After your pipeline job is set-up, trigger it. 
It will download the pipeline description automatically for the webmethods-sample-project-layout repository, then checkout the sources, build the code (using Asset Build Environment), create a project in Deployer referencing the build (using Project Automator), deploy the code using Deployer and finally run tests using the WmTestSuite. + +Whenever a developer checks in new IS packages or WmTestSuite tests, those will be automatically deployed and all new tests will be executed. For this to work, the structure defined in the project [https://github.com/SoftwareAG/webmethods-sample-project-layout.git](https://github.com/SoftwareAG/webmethods-sample-project-layout.git) has to be adhered. +## Test Execution +The wM Test Suite tests will have to be places in a directory a *setup* directory inside the test project, so that it can be picked up by the test executor. Please see [https://github.com/SoftwareAG/webmethods-sample-project-layout](https://github.com/SoftwareAG/webmethods-sample-project-layout) for details. diff --git a/System.properties b/System.properties old mode 100644 new mode 100755 index 26c0027..521013c --- a/System.properties +++ b/System.properties @@ -1,16 +1,33 @@ #System properties for the deployer solution # Use slash "/" as path separator. Example: Use "C:/SoftwareAG", instead of "C:\SoftwareAG". 
-deployerInstallationPath=${SAGHome}/IntegrationServer/instances/default/packages/WmDeployer/bin/ +config.deployer.deployerInstallationPath=${SAGHome}/IntegrationServer/instances/default/packages/WmDeployer/bin/ +#config.deployer.deployerInstallationPath=${SAGHome}/IntegrationServer/instances/deployer/packages/WmDeployer/bin/ #Build Server Settings -deployerHost=localhost -deployerPort=5555 -deployerUsername=Administrator -deployerPassword=manage - -#Target CI Test Server Settings -testISHost=localhost -testISPort=5555 -testISUsername=Administrator -testISPassword=manage +config.deployer.deployerHost=localhost +config.deployer.deployerPort=5599 +config.deployer.deployerUsername=Administrator +config.deployer.deployerPassword=manage + +# where to store temporary files +config.tmpdir=tmp + +# where to store the file based repositories created by the ABE +config.build.buildStorageDir=${config.tmpdir}/fbr + +#config.deployer.projectNamePrefix=Jenkins_${env.BUILD_NUMBER} +config.deployer.projectNamePrefix=BDA + +config.build.abeHome=${SAGHome}/common/AssetBuildEnvironment + +# where to get external jars from, either "remote" (needs internet connection to access remote maven repos), or "local" (assumes libs are manually downloaded and placed in "lib/ext" folder) +config.libs.resolve=remote + +# IS Continuous Code Review +config.isccr.enable=false +# Install dir of ISCCR +config.isscr.home=c:/Tools/ISCCRv4 + +# configure a reportDir, should be the same as configured in the Jenkins pipeline +config.test.reportDir=${env.WORKSPACE}/report \ No newline at end of file diff --git a/build.xml b/build.xml old mode 100644 new mode 100755 index 000f692..07cf3d7 --- a/build.xml +++ b/build.xml @@ -1,29 +1,56 @@ - - - - - - - - - + + + + + + + + + + - - - + Build project. - - + + Deploy project. - + + Extract varsub. + + + + + Preparing donwload spec. + + + + Preparing donwload spec. + + + Run tests. - + + + + Run static code analysis. + Build and deploy the project. 
+ + + _TEST + + + + + Get jars from remote repo with ivy and then retrieve into local lib/ext folder + + diff --git a/buildCallABE.xml b/buildCallABE.xml old mode 100644 new mode 100755 index 75abd0b..5dece1f --- a/buildCallABE.xml +++ b/buildCallABE.xml @@ -1,46 +1,49 @@ - - - - - + - + - + + + + - - - - invoke ABE Build - + + invoke ABE Build + - - + + - + - - + + - - + + - + - + - + - + - - + + + + + diff --git a/buildDependencies.xml b/buildDependencies.xml new file mode 100644 index 0000000..711ab9c --- /dev/null +++ b/buildDependencies.xml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + + + + + + + + + Download and store ivy.jar locally + + + + + + + + + + + + + + + downloads ivy jar, then resolves all necessary jars by using the resources/ivy.ivy.xml dependency declaration + Using ivy to download necessary jars as specified in the '${ivy.xml}' file. + + + + + + + + \ No newline at end of file diff --git a/buildDeployer.xml b/buildDeployer.xml old mode 100644 new mode 100755 index 9e903fc..1e95d4b --- a/buildDeployer.xml +++ b/buildDeployer.xml @@ -1,24 +1,7 @@ - - - - - - - - - - - - - - - - - + - @@ -27,94 +10,226 @@ - + - + - - - - + + + + + + + + + + + + - - - - - - - - + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - + + + + - - - - - + + + + + + + + + + + + + - + + + + + - - - - + - - + + + + + + + + - + + + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + - - - - - - - - - - - + + + + + + + + + + + + + + - - - + + + + + - + + + + - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/buildDeployer_Varsub.xml b/buildDeployer_Varsub.xml new file mode 100755 index 0000000..66b411a --- /dev/null +++ b/buildDeployer_Varsub.xml @@ -0,0 +1,107 @@ + + + + + + This build file provides a utility with which variable substitution templates can be created. 
+ For this, a variable substitution file is extracted from Deployer and the variable substitution templates are extracted from it dynamically. + Provide the following parameters: + - bda.projectName: The name of the Deployer project + - bda.deploymentMapName: The name of the deployment map in the Deployer project from which to extract the variable substitution file + - bda.targetEnv: The target environment for which to create the variable substitution templates, e.g. DEV, TEST, QA or PROD + - bda.varsubDir: The directory where to store the generated varsub templates. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/buildISCCR.xml b/buildISCCR.xml new file mode 100644 index 0000000..b1f73d2 --- /dev/null +++ b/buildISCCR.xml @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/buildInitProject.xml b/buildInitProject.xml old mode 100644 new mode 100755 index 2f9295f..8870e1d --- a/buildInitProject.xml +++ b/buildInitProject.xml @@ -1,95 +1,194 @@ - - + + + + + + + + - - + + - - + + - - + + - - - + + + - - - + + + - - - + + + - + - - + + - - - + + + - - - + + + + + - - - + + + - - - - + + - - - + + + - - - + + + - - - + + + - - - + + + - - + + + - - + + + + + - - - - + + + + + + + + + + + + + + + - - + - - + + + + + - + + + + + + + + + + - + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/buildRepo.xml b/buildRepo.xml new file mode 100755 index 0000000..2abc6ff --- /dev/null +++ b/buildRepo.xml @@ -0,0 +1,90 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/buildTest.xml b/buildTest.xml old mode 100644 new mode 100755 index 4cd2957..e8d334c --- a/buildTest.xml +++ b/buildTest.xml @@ -1,83 +1,99 @@ - + - - - + + - - - - - + + + + + - - - - - - + + + + + + + + + + + + + + + + + + + + - + - - + + + - - - + + + - - - - - + + + + + + + - + - - - + + - - - + + + - + - - - + + + - + @@ -85,66 +101,83 @@ - + - - - - + + + + - + + - + - - + - - - - - - - - - - Unit test failure - - - - - - + + + - + - - - - - - - - + + + + + def configSlurper = new ConfigSlurper(properties.'bda.targetEnv') + configSlurper.classLoader = this.class.getClassLoader() + def config = configSlurper.parse(new File(properties.'environmentsDefinition').toURL()) + if(config.IntegrationServers.size() > 0 ) { + config.IntegrationServers.each { name, isConfig -> + def host = isConfig.host + def port = isConfig.port + def user = isConfig.username + def pwd = isConfig.pwd + + def ac = project.createTask("antcall"); + ac.setTarget("bda.test.executeTestAndReportXML"); + // necessary to inheritAll and inheritRefs because of bda.classpath path reference + ac.setInheritAll(true) + ac.setInheritRefs(true) + def acparam1 = ac.createParam(); + acparam1.setName("host"); + acparam1.setValue(host); + def acparam2 = ac.createParam(); + acparam2.setName("port"); + acparam2.setValue(port); + def acparam3 = ac.createParam(); + acparam3.setName("user"); + acparam3.setValue(user); + def acparam4 = ac.createParam(); + acparam4.setName("pwd"); + acparam4.setValue(pwd); + def acparam5 = ac.createParam(); + acparam5.setName("testPackageName"); + acparam5.setValue(properties.'testPackageName'); + ac.execute(); + } + } + - + diff --git a/build_QuiesceDeployment.xml b/build_QuiesceDeployment.xml new file mode 100644 index 0000000..0f10a89 --- /dev/null +++ b/build_QuiesceDeployment.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff 
--git a/lib/ant-contrib.jar b/lib/ant-contrib.jar deleted file mode 100644 index 9c9503f..0000000 Binary files a/lib/ant-contrib.jar and /dev/null differ diff --git a/lib/bda-utils.jar b/lib/bda-utils.jar new file mode 100644 index 0000000..d1c6e55 Binary files /dev/null and b/lib/bda-utils.jar differ diff --git a/lib/ext/README.md b/lib/ext/README.md new file mode 100644 index 0000000..1e0e274 --- /dev/null +++ b/lib/ext/README.md @@ -0,0 +1 @@ +Place any external jars necessary for your build into this directory... All jars in this directory will be added to the global classpath "bda.classpath". \ No newline at end of file diff --git a/lib/httpunit.jar b/lib/httpunit.jar deleted file mode 100644 index 33e88d0..0000000 Binary files a/lib/httpunit.jar and /dev/null differ diff --git a/lib/jars.txt b/lib/jars.txt new file mode 100644 index 0000000..e6aa682 --- /dev/null +++ b/lib/jars.txt @@ -0,0 +1,9 @@ +WmTestSuite jars +- wmtestsuite/serviceInterceptor.jar --> WmTestSuite 9.12 +- wmtestsuite/serviceMockClient.jar --> WmTestSuite 9.12 + +TaskDef Jars +- bda-utils --> custom logger --> sources included, build script resources/BDA/build_compile.xml + +All other jars are provided by Ivy, see resources/ivy/ivy.xml. +If Ivy is not used, place necessary jars in "lib/ext" folder. Use "bda.dependencies.util.downloadJarsToLibFolder" target to download jars locally into lib/ext folder. 
diff --git a/lib/junit-4.8.2.jar b/lib/junit-4.8.2.jar deleted file mode 100644 index 5b4bb84..0000000 Binary files a/lib/junit-4.8.2.jar and /dev/null differ diff --git a/lib/serviceInterceptor.jar b/lib/wmtestsuite/serviceInterceptor.jar old mode 100644 new mode 100755 similarity index 100% rename from lib/serviceInterceptor.jar rename to lib/wmtestsuite/serviceInterceptor.jar diff --git a/lib/serviceMockClient.jar b/lib/wmtestsuite/serviceMockClient.jar old mode 100644 new mode 100755 similarity index 100% rename from lib/serviceMockClient.jar rename to lib/wmtestsuite/serviceMockClient.jar diff --git a/lib/xmlunit1.0.jar b/lib/xmlunit1.0.jar deleted file mode 100644 index 6e8ed58..0000000 Binary files a/lib/xmlunit1.0.jar and /dev/null differ diff --git a/master_build_Reference/build.properties b/master_build_Reference/build.properties old mode 100644 new mode 100755 index dac7719..98101ee --- a/master_build_Reference/build.properties +++ b/master_build_Reference/build.properties @@ -22,7 +22,7 @@ build.output.dir=${buildOutputDir} # Normally you download them from a version control system (VCS) to your HDD. # Use slash "/" as path separator. Example: Use "C:/SoftwareAG", instead of "C:\SoftwareAG". # More than one directory can be specified by using ; delimiter. Please do not use blank symbols around the delimiter. -build.source.dir=${isProjectsDir};${isConfigDir};${bpmProjectsDir};${mwsProjectsDir};${rulesProjectsDir};${umProjectsDir} +build.source.dir=${isProjectsDir};${bpmProjectsDir};${mwsProjectsDir};${rulesProjectsDir};${umProjectsDir}; # Use slash "/" as path separator. Example: Use "C:/SoftwareAG", instead of "C:\SoftwareAG". # More than one directory can be specified by using ; delimiter. Please do not use blank symbols around the delimiter. @@ -32,7 +32,8 @@ build.source.project.dir= # This is the version of the current builds. A auto incremental build number will be appended to this to # get the final build number. 
For example, the build for version 9.7 will have a build number = 9.8.1 and # the second build number will be 9.8.2. -build.version=1.0 +### we will inject the build.version variable from the outside +# build.version=1.0 # Enable/disable the check out task @@ -60,9 +61,9 @@ enable.build.AgileApps=false #Logging properties -build.log.enable=false -build.log.fileName= -build.logLevel=info +build.log.enable=true +build.log.fileName=logs/testautomation_abe.log +build.logLevel=debug #Exit on Error or Warning Property build.exit.error=false diff --git a/replicate/inbound/BDA_TEST_henningGitHubPipeline.vs b/replicate/inbound/BDA_TEST_henningGitHubPipeline.vs new file mode 100644 index 0000000..a052a16 --- /dev/null +++ b/replicate/inbound/BDA_TEST_henningGitHubPipeline.vs @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/resources/BDA/build_compile.xml b/resources/BDA/build_compile.xml new file mode 100755 index 0000000..576c27f --- /dev/null +++ b/resources/BDA/build_compile.xml @@ -0,0 +1,41 @@ + + + This project is for the compilation of helper/util classes for the BDA project. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/resources/ProjectAutomator/CreateProjectAutomatorTemplate.groovy b/resources/ProjectAutomator/CreateProjectAutomatorTemplate.groovy new file mode 100755 index 0000000..22b6414 --- /dev/null +++ b/resources/ProjectAutomator/CreateProjectAutomatorTemplate.groovy @@ -0,0 +1,367 @@ +#!/opt/bpa/groovy-2.4.3/bin/groovy +package com.softwaerag.gcs.wx.bdas.projectAutomator + +import groovy.xml.dom.DOMCategory +import groovy.xml.MarkupBuilder +import groovy.xml.StreamingMarkupBuilder +import groovy.xml.XmlUtil + +def cli = new CliBuilder (usage:'createdeployerproject.groovy [options]') +// command line argument line example: -r c:\Users\Administrator\github\henning-sagdevops-ci-assets\tmp\fbr\Henning-local-sagdevops-ci-assets_fbrRepo -f out.xml -p BDA_TEST_Henning-local-sagdevops-ci-assets -t TEST -d localhost:5555 -u Administrator -s manage -e c:\Users\Administrator\github\henning-webmethods-sample-project-layout\ENV.groovy -repoName repoName -splitDeploymentSets false +cli.with { + h longOpt:'help', 'Usage information' + r longOpt:'repository',argName:'repository', args:1, 'Path to ABE file based repository' + f longOpt:'outputfile', argName:'outputfile', args:1, 'Example: projectAutomator.xml' + p longOpt:'project', argName:'project', args:1, 'Example: BDA_Automator_Project' + t longOpt:'target', argName:'target', args:1, 'Must have a matching entry in the ENV.groovy flie. Example: DEV.' + d longOpt:'deployer.hostPort', argName:'deployerHostPort', args:1, 'Example: localhost:5555.' + u longOpt:'deployer.user', argName:'deployerUser', args:1, 'Example: Administrator.' + s longOpt:'deployer.password', argName:'deployerPassword', args:1, 'Example: manage.' + e longOpt:'environments', argName:'environments', args:1, 'Example: ENV.groovy.' 
+ repoName longOpt:'repoName', argName:'repoName', args:1, 'Name of the Repository to create in deployer' + splitDeploymentSets longOpt:'splitDeploymentSets', argName:'splitDeploymentSets', args:1, 'Boolean. If true, for each IntegrationServer a separate Deployment Set is created and deployments are done sequentially. Default: false.' +} +def opts = cli.parse(args) +org.codehaus.groovy.ant.Groovy +if(!opts) return + if(opts.help) { + cli.usage() + return + } + +assert opts + +project = opts.p +def repo = opts.r +def outputFile = opts.f +target = opts.t +def deployerHostPort = opts.d +def deployerUser = opts.u +def deployerPassword = opts.s +def environments = opts.e +repoName = opts.repoName +splitDeploymentSets = new Boolean(opts.splitDeploymentSets) + +println "> Creating Project Automator templates with the following properties:" +println "\t- Project: '${project}'" +println "\t- Repository: '${repo}'" +println "\t- Output File: '${outputFile}'" +println "\t- Target: '${target}'" +println "\t- Deployer host: '${deployerHostPort}'" +println "\t- Deployer user: '${deployerUser}'" +println "\t- Deployer passsword: *****" +println "\t- Environments definition: '${environments}'" +println "\t- Repository name: '${repoName}'" +println "\t- SplitDeploymentSets: '${splitDeploymentSets}'" + +assert project +assert repo +assert outputFile +assert target +assert deployerHostPort +assert deployerUser +assert deployerPassword +assert environments +assert repoName + +//println "${opts.arguments()}" + +def outFile = new File(outputFile) +//def writer = new FileWriter(outFile) +PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(outputFile))); + +//def xml = new MarkupBuilder(writer) +def xml = new StreamingMarkupBuilder(); +packages = [] +bpms = [] +mws = [] +tn = [] + + +def configSlurper = new ConfigSlurper(target) +configSlurper.classLoader = this.class.getClassLoader() +config = configSlurper.parse(new File(environments).toURL()) +//def config = 
configSlurper.parse(configText) + +// first parse the file-based repository + +def repoDir = new File(repo) +assert repoDir.exists() : "Repository directory '${repo}' does not exist" + +// we only support packages, processes and MWS projects currently... +repoDir.eachDirRecurse() { dir -> + dir.eachFileMatch(~/.*.acdl/) { file -> + def doc = groovy.xml.DOMBuilder.parse(new FileReader(file)) + def asset_composite = doc.documentElement + use (DOMCategory) { + def implementation_generic = asset_composite.'implementation.generic' + def type=implementation_generic.'@type'[0] + def displayName = asset_composite.'@displayName' + def name = asset_composite.'@name' + + println "found acdl ${file} of type=${type} with name=${name} and displayName=${displayName}" + + if(type == 'bpmprocess') { + bpms.add([name: name, displayName: displayName]) + } else if (type == 'ispackage' || type == 'isconfiguration') { + packages.add([name: name, displayName: name]) + } else if (type == 'war') { + mws.add([name: name, displayName: displayName]) + } else if (type == 'pdp') { + mws.add([name: name, displayName: displayName]) + } else if (type == 'cdp') { + mws.add([name: name, displayName: displayName]) + } + } + } +} + +/** + * craeetes a deployment maps for IntegrationServers only, if splitDeploymentSets is true + * @param xml + * @param splitDeploymentSets + * @return + */ +def createISOnlyDeploymentMapSetMappingAndDeploymentCandidate(splitDeploymentSets) { + if( splitDeploymentSets ) { + if(config.IntegrationServers.size() > 0 && !packages.empty) { + def ds = new StreamingMarkupBuilder().bind() { + config.IntegrationServers.keySet().each { + def isAliasName = it.toString() + DeploymentMap(description:"Deployment Map for IS DeploymentSet to IS node ${isAliasName}", name:"IS_DeploymentMap_${isAliasName}") + } + config.IntegrationServers.keySet().each { + def isAliasName = it.toString() + MapSetMapping(mapName: "IS_DeploymentMap_${isAliasName}", setName: "IS_DeploymentSet_${isAliasName}") 
{ + alias(type:'IS', "${target}_${isAliasName}") + } + } + config.IntegrationServers.keySet().each { + def isAliasName = it.toString() + DeploymentCandidate(description: "Deployment to IS node ${isAliasName} only", mapName: "IS_DeploymentMap_${isAliasName}", name: "IS_Deployment_${isAliasName}") + } + } + String output = ds.toString() + return output + } + } + return "" +} + +/** + * craeetes a standard deployment map set mapping for the standard deployment set (named DeploymentSet) + * IS assets will only be added if splitDeploymentSets==false + * @param xml + * @return + */ +def createDeploymentMapSetMappingAndDeploymentCandidate(splitDeploymentSets) { + boolean hasChildren = false; + def ds = new StreamingMarkupBuilder().bindNode() { + DeploymentMap(description:"Deployment Map for standard Deployment Set", name:"DeploymentMap") + MapSetMapping(mapName: 'DeploymentMap', setName: 'DeploymentSet') { + if(config.IntegrationServers.size() > 0 && !splitDeploymentSets) { + config.IntegrationServers.keySet().each { + alias(type:'IS', "${target}_${it}") + hasChildren = true + } + } + if(config.ProcessModels.size() > 0 ) { + config.ProcessModels.keySet().each { + alias(type:'BPM', "${target}_${it}") + hasChildren = true + } + } + if(config.MWS.size() > 0 ) { + config.MWS.keySet().each { + alias(type:'MWS', "${target}_${it}") + hasChildren = true + } + } + } + DeploymentCandidate(description: "Deployment", mapName: "DeploymentMap", name: "Deployment") + } + def output = ds.toString() + if( hasChildren ) { + return output; + } + return ""; +} + +/** + * Creates the DelpoymentSet for IS only, one DeploymentSet for each configured IS target node. 
+ * The DeploymentSets will only be created if the parameter splitDeploymentSet is true + * @param xml + * @param splitDeloymentSets + * @return + */ +def createISOnlyDeploymentSets(splitDeploymentSets) { + if( splitDeploymentSets ) { + if(config.IntegrationServers.size() > 0 && !packages.empty) { + def ds = new StreamingMarkupBuilder().bind() { + config.IntegrationServers.keySet().each { + DeploymentSet (autoResolve:'ignore', description: 'Deployment set containing only is assets for IS target node ' + it, name: "IS_DeploymentSet_${it}", srcAlias:"${repoName}") { + packages.each() { + Composite (name:"${it.name}", displayName:"${it.name}", srcAlias:"${repoName}", type:'IS') + } + } + } + } + String output = ds.toString() + return output + } + } + return "" +} + +/** + * craeetes a standard deployment set for all collected assets + * IS assets will only be added if splitDeploymentSets==false + * @param xml + * @return + */ +def createDeploymentSets(splitDeploymentSets) { + boolean hasChildren = false + def builder = new StreamingMarkupBuilder() + def ds = builder.bindNode() { + DeploymentSet (autoResolve:'ignore', description: 'deployment set containing all assets', name: 'DeploymentSet', srcAlias:"${repoName}") { + if (!packages.empty && !splitDeploymentSets) { + packages.each() { + Composite (name:"${it.name}", displayName:"${it.name}", srcAlias:"${repoName}", type:'IS') + hasChildren = true + } + } + if (!bpms.empty) { + bpms.each() { + Composite (name:"${it.name}", displayName:"${it.displayName}", srcAlias:"${repoName}", type:'BPM') + hasChildren = true + } + } + if (!mws.empty) { + mws.each() { + Composite (name:"${it.name}", displayName:"${it.displayName}", srcAlias:"${repoName}", type:'MWS') + hasChildren = true + } + } + } + } + def output = ds.toString() + if( hasChildren ) { + return output; + } + return ""; +} + +// creation of deployerspec +//xml.setDoubleQuotes(true) +def nxml = xml.bind() { + DeployerSpec(exitOnError:'true', sourceType:'Repository') { 
+ DeployerServer { + host("${deployerHostPort}") + user("${deployerUser}") + pwd("${deployerPassword}") + } + Environment { + if(config.IntegrationServers.size() > 0 ) { + IS { + config.IntegrationServers.each { name, isConfig -> + /* + * Merge explicit values in isConfig (ConfigSlurfer ConfigObject) with default + * values in config.IntegrationServer.defaults (hashmap), then merge again + * with explicit values (isConfig), so that default values do not replace + * explicit values, but only append + */ + def integ = isConfig + config.IntegrationServer.defaults + isConfig; + isalias(name: "${target}_${name}") { + host(integ.host) + port(integ.port) + user(integ.username) + if( integ.isSet('pwdHandle') ) { + pwdHandle(integ.pwdHandle) + } else { + pwd(integ.pwd) + } + useSSL(integ.useSSL) + version(integ.version) + installDeployerResource(integ.installDeployerResource) + Test(integ.test) + } + } + } + } + if(config.ProcessModels.size() > 0 ) { + ProcessModel { + config.ProcessModels.each { name, bpmConfig -> + def bpm = bpmConfig + config.ProcessModel.defaults + bpmConfig + pmalias(name: "${target}_${name}") { + host(bpm.host) + port(bpm.port) + user(bpm.username) + if( bpm.isSet('pwdHandle') ) { + pwdHandle(bpm.pwdHandle) + } else { + pwd(bpm.pwd) + } + useSSL(bpm.useSSL) + version(bpm.version) + Test(bpm.test) + } + } + } + } + if(config.MWS.size() > 0 ) { + MWS { + config.MWS.each { name, mwsConfig -> + def mwsConfigMerged = mwsConfig + config.MyWebmethodsServer.defaults + mwsConfig + mwsalias(name: "${target}_${name}") { + host(mwsConfigMerged.host) + port(mwsConfigMerged.port) + user(mwsConfigMerged.username) + if( mwsConfigMerged.isSet('pwdHandle') ) { + pwdHandle(mwsConfigMerged.pwdHandle) + } else { + pwd(mwsConfigMerged.pwd) + } + useSSL(mwsConfigMerged.useSSL) + version(mwsConfigMerged.version) + excludeCoreTaskEngineDependencies(mwsConfigMerged.excludeCoreTaskEngineDependencies) + cacheTimeOut(mwsConfigMerged.cacheTimeOut) + 
includeSecurityDependencies(mwsConfigMerged.includeSecurityDependencies) + rootFolderAliases(mwsConfigMerged.rootFolderAliases) + maximumFolderObjectCount(mwsConfigMerged.maximumFolderObjectCount) + enableAddtionalLogging(mwsConfigMerged.enableAddtionalLogging) + maxFolderDepth(mwsConfigMerged.maxFolderDepth) + Test(mwsConfigMerged.test) + } + } + } + } + Repository { + repalias(name: "${repoName}") { + type("FlatFile") + urlOrDirectory("${repo}") + Test('true') + } + } + } + Projects(projectPrefix: '') { + Project(description: "Generated by script on ${new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date())}", name: "${project}", overwrite: 'true', type:'Repository') { + ProjectProperties { + Property(name:'projectLocking', 'false') + Property(name:'concurrentDeployment', 'false') + Property(name:'ignoreMissingDependencies', 'true') + Property(name:'isTransactionalDeployment', 'true') + } + mkp.yieldUnescaped createISOnlyDeploymentSets(splitDeploymentSets) + mkp.yieldUnescaped createDeploymentSets(splitDeploymentSets) + + mkp.yieldUnescaped createISOnlyDeploymentMapSetMappingAndDeploymentCandidate(splitDeploymentSets) + mkp.yieldUnescaped createDeploymentMapSetMappingAndDeploymentCandidate(splitDeploymentSets) + } + } + } +} +println XmlUtil.serialize( nxml, writer ) + +writer.close() +println "Successfully create Project Automator Template '${outFile.getAbsolutePath()}'" + diff --git a/resources/ProjectSpecificationTemplate.xml b/resources/ProjectSpecificationTemplate.xml deleted file mode 100644 index e328ad0..0000000 --- a/resources/ProjectSpecificationTemplate.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/resources/isccr/config_bda.xml b/resources/isccr/config_bda.xml new file mode 100644 index 0000000..3a93f76 --- /dev/null +++ b/resources/isccr/config_bda.xml @@ -0,0 +1,635 @@ + + + + + + + + Package Name + ${code.review.pkgname} + + + + Package prefix + ${code.review.pkgprefix} + + + 
+ Package Directory + ${code.review.directory} + + + + output directory for the reports + ${code.review.output.directory} + + + + Pre-pend the date to the generated report file names to allow for historical comparison over time + false + + + + Root folder prefix used in all services + com.softwareag + + + + + Path to the xslt file for report generation + ${code.review.xsltReportFile} + + + Path to the xslt file for a multi package report generation + GenerateMultiReport.xsl + + + + + Ignore Failures and continue with the build irrespective of the code quality + false + + + + Logger Output format + + INFO + + + + + Timout (seconds) of a single check + 120 + + + + + 3 + + + + + 1 + + + + + + + + com.softwareag.gcs.wm.codereview.operations.ServiceProperties + + stateless + yes + + + + + No Save/Restore Pipeline options should be set against the service. The Save/Restore pipeline options where set can result in unnecessary IO and therefore gives a reduced service performance as these services need to read or write to the disk and concurrent service execution could be compromised. It is recommended to remote these options from the services before deployment to a non-development environment, or use the extended property (watt.server.pipeline.processor) to disable these in the non-development environments. 
Where this is the case, this rule can be ignored + + com.softwareag.gcs.wm.codereview.operations.ServiceProperties + + pipeline_option + 0 + + 1 + + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//INVOKE[@SERVICE='pub.flow:savePipelineToFile' or @SERVICE='pub.flow:savePipeline'])=0 + count(//INVOKE[@SERVICE='pub.flow:restorePipelineFromFile' or @SERVICE='pub.flow:restorePipeline'])=0 + count(//INVOKE[@SERVICE='pub.flow:tracePipeline'])=0 + count(//MAPINVOKE[@SERVICE='pub.flow:savePipelineToFile' or @SERVICE='pub.flow:savePipeline'])=0 + count(//MAPINVOKE[@SERVICE='pub.flow:restorePipelineFromFile' or @SERVICE='pub.flow:restorePipeline'])=0 + count(//MAPINVOKE[@SERVICE='pub.flow:tracePipeline'])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//INVOKE[@SERVICE='pub.flow:clearPipeline'])=0 + count(//MAPINVOKE[@SERVICE='pub.flow:clearPipeline'])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + + count(//INVOKE[@SERVICE='pub.flow:setResponse'])=0 + count(//MAPINVOKE[@SERVICE='pub.flow:setResponse'])=0 + count(//INVOKE[@SERVICE='pub.client:soapHTTP'])=0 + count(//MAPINVOKE[@SERVICE='pub.client:soapHTTP'])=0 + count(//INVOKE[@SERVICE='pub.client:soapRPC'])=0 + count(//MAPINVOKE[@SERVICE='pub.client:soapRPC'])=0 + count(//INVOKE[@SERVICE='pub.event.eda:eventToDocument'])=0 + count(//MAPINVOKE[@SERVICE='pub.event.eda:eventToDocument'])=0 + count(//INVOKE[@SERVICE='pub.event.eda:send'])=0 + count(//MAPINVOKE[@SERVICE='pub.event.eda:send'])=0 + count(//INVOKE[@SERVICE='pub.publish:syncToBroker'])=0 + count(//MAPINVOKE[@SERVICE='pub.publish:syncToBroker'])=0 + count(//INVOKE[@SERVICE='pub.security:setKeyAndChain'])=0 + count(//MAPINVOKE[@SERVICE='pub.security:setKeyAndChain'])=0 + count(//INVOKE[@SERVICE='pub.security.pkcs7:sign'])=0 + count(//MAPINVOKE[@SERVICE='pub.security.pkcs7:sign'])=0 + count(//INVOKE[@SERVICE='pub.smime:createSignedAndEncryptedData'])=0 
+ count(//MAPINVOKE[@SERVICE='pub.smime:createSignedAndEncryptedData'])=0 + count(//INVOKE[@SERVICE='pub.smime:createSignedData'])=0 + count(//MAPINVOKE[@SERVICE='pub.smime:createSignedData'])=0 + count(//INVOKE[@SERVICE='pub.smime:processEncryptedData'])=0 + count(//MAPINVOKE[@SERVICE='pub.smime:processEncryptedData'])=0 + count(//INVOKE[@SERVICE='pub.soap.handler:addHeaderElement'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.handler:addHeaderElement'])=0 + count(//INVOKE[@SERVICE='pub.soap.handler:getHeaderElement'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.handler:getHeaderElement'])=0 + count(//INVOKE[@SERVICE='pub.soap.handler:registerConsumer'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.handler:registerConsumer'])=0 + count(//INVOKE[@SERVICE='pub.soap.handler:registerProvider'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.handler:registerProvider'])=0 + count(//INVOKE[@SERVICE='pub.soap.handler:removeHeaderElement'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.handler:removeHeaderElement'])=0 + count(//INVOKE[@SERVICE='pub.soap.processor:list'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.processor:list'])=0 + count(//INVOKE[@SERVICE='pub.soap.processor:processMessage'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.processor:processMessage'])=0 + count(//INVOKE[@SERVICE='pub.soap.processor:processRPCMessage'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.processor:processRPCMessage'])=0 + count(//INVOKE[@SERVICE='pub.soap.processor:registerProcessor'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.processor:registerProcessor'])=0 + count(//INVOKE[@SERVICE='pub.soap.processor:unregisterProcessor'])=0 + count(//MAPINVOKE[@SERVICE='pub.soap.processor:unregisterProcessor'])=0 + count(//INVOKE[starts-with(@SERVICE,'pub.pki')])=0 + count(//MAPINVOKE[starts-with(@SERVICE,'pub.pki')])=0 + count(//INVOKE[starts-with(@SERVICE,'wm.server.pki')])=0 + count(//MAPINVOKE[starts-with(@SERVICE,'wm.server.pki')])=0 + count(//INVOKE[starts-with(@SERVICE,'pub.vcs')])=0 + 
count(//MAPINVOKE[starts-with(@SERVICE,'pub.vcs')])=0 + count(//INVOKE[starts-with(@SERVICE,'wm.server.vcs')])=0 + count(//MAPINVOKE[starts-with(@SERVICE,'wm.server.vcs')])=0 + count(//INVOKE[starts-with(@SERVICE,'wm.vcs')])=0 + count(//MAPINVOKE[starts-with(@SERVICE,'wm.vcs')])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//*[@DISABLED='true'])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//INVOKE[starts-with(@SERVICE,'pub.storage')])=0 + count(//MAPINVOKE[starts-with(@SERVICE,'pub.storage')])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//INVOKE[@SERVICE='pub.flow:debugLog'])=0 + count(//MAPINVOKE[@SERVICE='pub.flow:debugLog'])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FilteredFlowXMLXpathChecker + + + ^%folder-prefix%\.*\S*\.pub[\.\:]\S* + + count(//SEQUENCE[@EXIT-ON='SUCCESS' and SEQUENCE[@EXIT-ON='FAILURE'] and SEQUENCE[@EXIT-ON='DONE']]) > 0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.ConnectionsChecker + + + false + + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//INVOKE[@SERVICE='pub.client.ftp:login' and not(//@FIELD='/timeout;1;0')])=0 + count(//MAPINVOKE[@SERVICE='pub.client.ftp:login' and not(//@FIELD='/timeout;1;0')])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//INVOKE[string-length(COMMENT)=0 and not(@DISABLED)])=0 + + + com\.softwareag\.example\.pub\:addNumber + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//SEQUENCE[string-length(COMMENT)=0 and not(@DISABLED)])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.OrphanedChecker + + + ^%folder-prefix%\.*\S*\.pub[\.\:]\S* + + //INVOKE/@SERVICE + + //MAPINVOKE/@SERVICE + + //Values//value[@name='rec_ref'] + + Service.doInvoke(*) + + + + + + + 
com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//BRANCH[not(@DISABLED)][not(@LABELEXPRESSIONS)][not(@SWITCH)])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//BRANCH[not(@DISABLED) and @LABELEXPRESSIONS="true"]/*[not(@DISABLED) and ((not(@NAME) or @NAME='$null') and (not(local-name()='COMMENT')))])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count( //EXIT[not(@FROM) or (FROM and @FROM!='')][not(@DISABLED)] )=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//BRANCH[count(./*[local-name()!='COMMENT'])=0])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//LOOP[count(./*[local-name()!='COMMENT'])=0])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//RETRY[count(./*[local-name()!='COMMENT'])=0])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//SEQUENCE[count(./*[local-name()!='COMMENT'])=0])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//MAP[@MODE='STANDALONE' and count(./*[local-name()!='COMMENT'])=0])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//LOOP[not(@IN-ARRAY)][not(@DISABLED)])=0 + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowXMLXpathChecker + + count(//FLOW/*[local-name()!='COMMENT' and not(@DISABLED='true')])>0 + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + rootfolder + %code.review.pkgname% + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + folder + [a-z]{1}[a-z0-9_]* + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + folder-prefix + %folder-prefix% + + + + + 1. The package name should not end with the word "Package" or "Pkg".
2. Package names must not be prefixed with "Wm". These usually represent packages distributed by webMethods.
3. Packages should start with an uppercase character and only contain alphanumeric characters plus the underscore when absolutely required.
4. Package names should start with your provided prefix. A package prefix aids identification of packages created by the organisation.]]>
+ + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + package + %code.review.pkgname% + %code.review.pkgprefix% + [A-Z]{1}[a-zA-Z0-9_]* + + + +
+ + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + service + _get|_post|_delete|_put|_head|_default|[a-z]{1}[a-zA-Z0-9]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + document + [A-Z]{1}[a-zA-Z0-9_]*|docTypeRef_[a-zA-Z0-9]+_[a-zA-Z0-9]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + schema + schema_[A-Z]{1}[a-zA-Z0-9_]*|[A-Z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + flatfile + [A-Z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + blaze + [a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + xsl + [a-z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + adapter + [a-z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + trigger + [a-z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + wsd + [a-z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + connection + [A-Z]{1}[a-zA-Z0-9_]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.DocumentChecker + + + //value[@name="field_name"] + + [a-z]{1}[a-zA-Z0-9]*|\@[a-z]{1}[a-zA-Z0-9]*|[a-zA-Z0-9]*\:[a-zA-Z]{1}[a-zA-Z0-9_\-]*|_env + + + + + + + com.softwareag.gcs.wm.codereview.operations.FlowSignatureChecker + + + //Values//record[@name='svc_sig']//value[@name='field_name'] + + \$resourceID|\$path|\$httpMethod|flow.inputs|flow.outputs|ProcessData|TaskData|TaskCompletionInfo|TaskQueueInfo|JMSMessage|JMSType|ActionEvent|ChallengeEvent|[a-z]{1}[a-zA-Z0-9]* + + + + + + + + com.softwareag.gcs.wm.codereview.operations.NamingStandards + + package-suffix + %code.review.pkgname% + _TEST + _STUB + _DEV + + + + +
+ + + + + + + + + + + + + + + + + + +
diff --git a/resources/isccr/unitReport.xslt b/resources/isccr/unitReport.xslt new file mode 100644 index 0000000..728ae4b --- /dev/null +++ b/resources/isccr/unitReport.xslt @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + + + + + + [] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/resources/ivy/ivy-local.xml b/resources/ivy/ivy-local.xml new file mode 100644 index 0000000..eb243d6 --- /dev/null +++ b/resources/ivy/ivy-local.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/resources/ivy/ivy-remote.xml b/resources/ivy/ivy-remote.xml new file mode 100644 index 0000000..05aa05f --- /dev/null +++ b/resources/ivy/ivy-remote.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/resources/ivy/ivy.properties b/resources/ivy/ivy.properties new file mode 100644 index 0000000..f761f9f --- /dev/null +++ b/resources/ivy/ivy.properties @@ -0,0 +1 @@ +SAGHome=c:\\SoftwareAG99 \ No newline at end of file diff --git a/resources/ivy/ivysettings-local.xml b/resources/ivy/ivysettings-local.xml new file mode 100644 index 0000000..6f6681b --- /dev/null +++ b/resources/ivy/ivysettings-local.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/resources/ivy/ivysettings-remote.xml b/resources/ivy/ivysettings-remote.xml new file mode 100644 index 0000000..ed281e9 --- /dev/null +++ b/resources/ivy/ivysettings-remote.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/resources/log4j.properties b/resources/log4j.properties new file mode 100755 index 0000000..789adf5 --- /dev/null +++ b/resources/log4j.properties @@ -0,0 +1,7 @@ +log4j.logger.TestAutomation=TRACE, myFileAppender +log4j.appender.myFileAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.myFileAppender.datePattern='.'yyyyMMdd 
+log4j.appender.myFileAppender.File=logs/testautomation.log +log4j.appender.myFileAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.myFileAppender.layout.ConversionPattern=%d{ISO8601} %p \t[%c]\t - %m%n +log4j.additivity.TestAutomation=false diff --git a/resources/quiesce/RestoreTriggersSource.groovy b/resources/quiesce/RestoreTriggersSource.groovy new file mode 100644 index 0000000..0cad3f0 --- /dev/null +++ b/resources/quiesce/RestoreTriggersSource.groovy @@ -0,0 +1,62 @@ +#!/opt/bpa/groovy-2.4.3/bin/groovy +package com.softwaerag.gcs.wx.bdas.build.quiesce + +import groovy.io.FileType; + + +def cli = new CliBuilder (usage:'SuspendTriggersSource.groovy ') +cli.with { + h longOpt:'help', 'Usage information' + s longOpt:'pathToIsPackageSource',argName:'pathToIsPackageSource', args:1, required:true, 'Path to the the checkout/source dir with the IS packages"' + p longOpt:'packageList',argName:'packageList', args:1, optionalArg:true, '[optional] Comma separated list of package names for which to suspend triggers. If not provided, all packages in the source dir are scanned.' + t longOpt:'triggerType',argName:'triggerType', args:1, required:true, 'Type of triggers to suspend. 
Valid options are [all|jms|messaging]' +} +def opts = cli.parse(args) +org.codehaus.groovy.ant.Groovy +if(!opts) return + if(opts.help) { + cli.usage() + return + } + +assert opts +assert opts.pathToIsPackageSource +assert opts.triggerType.equalsIgnoreCase("all") ||opts.triggerType.equalsIgnoreCase("messaging") ||opts.triggerType.equalsIgnoreCase("jms") + +def pathToIsPackageSource = opts.pathToIsPackageSource +def packageList = opts.packageList +def triggerType = opts.triggerType + +def packagesDir = new File(pathToIsPackageSource) +assert packagesDir.exists() : "Path to IS package directory '${pathToIsPackageSource}' does not exist" + +def packageArr = [] +if( !packageList ) { + // get list of all packages in the package dir + packagesDir.eachFile (FileType.DIRECTORIES) { dir -> + packageArr << dir.name + } +} else { + packageArr = packageList.split(",").collect { it.trim() } +} + +println "suspending triggers for all packages in dir '${packagesDir.getAbsolutePath()}'. Packages: " +packageArr.each { println it } + +packageArr.each { packageName -> + def packageDir = new File(packagesDir, packageName) + assert packageDir.exists() : "Package ${packageName} does not exist in directory ${packagesDir.getAbsolutePath()}." 
+ def nsDir = new File(packageDir, "ns") + println ("Searching for all triggers in package " + packageName) + nsDir.traverse( + type : FileType.FILES, + nameFilter: ~/node.ndf.triggerBak/, + preDir: { if (it.name == '.svn' || it.name == '.git') return SKIP_SUBTREE }, + ) { nodeNdf -> + def origFile = new File(nodeNdf.getParent(), "node.ndf") + origFile << nodeNdf.text + } + +} + + diff --git a/resources/quiesce/SuspendTriggersSource.groovy b/resources/quiesce/SuspendTriggersSource.groovy new file mode 100644 index 0000000..ee39e83 --- /dev/null +++ b/resources/quiesce/SuspendTriggersSource.groovy @@ -0,0 +1,96 @@ +#!/opt/bpa/groovy-2.4.3/bin/groovy +package com.softwaerag.gcs.wx.bdas.build.quiesce + +import groovy.io.FileType; + + +def cli = new CliBuilder (usage:'SuspendTriggersSource.groovy ') +cli.with { + h longOpt:'help', 'Usage information' + s longOpt:'pathToIsPackageSource',argName:'pathToIsPackageSource', args:1, required:true, 'Path to the the checkout/source dir with the IS packages"' + p longOpt:'packageList',argName:'packageList', args:1, optionalArg:true, '[optional] Comma separated list of package names for which to suspend triggers. If not provided, all packages in the source dir are scanned.' + t longOpt:'triggerType',argName:'triggerType', args:1, required:true, 'Type of triggers to suspend. 
Valid options are [all|jms|messaging]' +} +def opts = cli.parse(args) +org.codehaus.groovy.ant.Groovy +if(!opts) return + if(opts.help) { + cli.usage() + return + } + +assert opts +assert opts.pathToIsPackageSource +assert opts.triggerType.equalsIgnoreCase("all") ||opts.triggerType.equalsIgnoreCase("messaging") ||opts.triggerType.equalsIgnoreCase("jms") + +def pathToIsPackageSource = opts.pathToIsPackageSource +def packageList = opts.packageList +def triggerType = opts.triggerType + +def packagesDir = new File(pathToIsPackageSource) +assert packagesDir.exists() : "Path to IS package directory '${pathToIsPackageSource}' does not exist" + +// create list of all packages, either all in the dir or all provided as CSV by user +def packageArr = [] +if( !packageList ) { + // get list of all packages in the package dir + packagesDir.eachFile (FileType.DIRECTORIES) { dir -> + packageArr << dir.name + } +} else { + packageArr = packageList.split(",").collect { it.trim() } +} + +println "suspending triggers for all packages in dir '${packagesDir.getAbsolutePath()}'. Packages: " +packageArr.each { println "- " + it } + +packageArr.each { packageName -> + def packageDir = new File(packagesDir, packageName) + if( !packageDir.exists() ) { + println "Package ${packageName} does not exist in directory ${packagesDir.getAbsolutePath()}." + return + } + // start in the ns dir of a package + def nsDir = new File(packageDir, "ns") + if( !nsDir.exists() ) { + // this is no valid IS package directory since the "ns" directory is missing + println "Folder ${packageName} in directory ${packagesDir.getAbsolutePath()} does not represent a valid IS package, since the 'ns' folder is missing. Ignoring." 
+ return; + } + println ("Searching for all triggers in package '" + packageName + "'.") + nsDir.traverse( + type : FileType.FILES, // filter for files + nameFilter: ~/node.ndf/, // with name node.df + preDir: { if (it.name == '.svn' || it.name == '.git') return SKIP_SUBTREE } // which arent a VCS dir + ) { nodeNdf -> + // slurp node.ndf xml + def node = new XmlSlurper().parse(nodeNdf) + // find the following entry jms-trigger + node.'value'.findAll { value -> + value.attributes().get("name") == "trigger_type" + }.each() { trigger -> + println "found trigger of type '${trigger}' in file '${nodeNdf.getAbsolutePath()}'" + // cretae backup of node.ndf file + def backupFile = new File(nodeNdf.getParent(), "node.ndf.triggerBak") + backupFile.write(nodeNdf.text) + // check if it is a JMS or a Messaging trigger + if( trigger == "jms-trigger" ) { + // replace true with false + node.record.Boolean.replaceBody 'false' + nodeNdf.write(groovy.xml.XmlUtil.serialize( node )) + } else if(trigger == "broker-trigger") { + // find two nodes in broker (i.e. messaging) trigger node.ndf, which both represent the supsended state of this trigger + node.record.'value'.findAll { recordValue -> + recordValue.attributes().get("name") == "processingSuspended" || + recordValue.attributes().get("name") == "retrievalSuspended" + }.each() { + it.replaceBody 'true' + } + nodeNdf.write(groovy.xml.XmlUtil.serialize( node )) + } else { + assert false : "Unknown trigger type..." 
+ } + } + }; +} + diff --git a/resources/scripts/ArtifactoryFileSpec.groovy b/resources/scripts/ArtifactoryFileSpec.groovy new file mode 100644 index 0000000..f7710ee --- /dev/null +++ b/resources/scripts/ArtifactoryFileSpec.groovy @@ -0,0 +1,91 @@ +#!/opt/bpa/groovy-2.4.3/bin/groovy +package com.softwaerag.gcs.wx.bdas.build.artifactory + + +def cli = new CliBuilder (usage:'artifactoryFileSpec.groovy -outputDir OUTPUT_DIR') +cli.with { + h longOpt:'help', 'Usage information' + outputDir longOpt:'outputDir',argName:'outputDir', args:1, 'Path to the directory where to store the artifactory json upload file spec' + fileSpecName longOpt:'fileSpecName',argName:'fileSpecName', args:1, 'Name of the file spec json file' + pathToFbrZip longOpt:'pathToFbrZip',argName:'pathToFbrZip', args:1, 'Path to the zipped FBR, only mandatory for type="upload"' + artifactoryRepository longOpt:'artifactoryRepository',argName:'artifactoryRepository', args:1, 'Name of the Artifactory Repository' + org longOpt:'org',argName:'org', args:1, 'Artifactory Organization. Use dot-notation for Ivy repo, e.g. com.sag.gcs' + moduleName longOpt:'moduleName',argName:'moduleName', args:1, 'Artifactory Module name' + baseRevision longOpt:'baseRevision',argName:'baseRevision', args:1, 'Artifactory base revision, e.g. 
version_number.build_number' + type longOpt:'type',argName:'type', args:1, 'Type of file spec, wither "upload" or "download"' + +} +def opts = cli.parse(args) +org.codehaus.groovy.ant.Groovy +if(!opts) return +if(opts.help) { + cli.usage() + return +} + +assert opts +assert opts.outputDir +assert opts.fileSpecName +assert opts.pathToFbrZip || opts.type == "download" +assert opts.org +assert opts.moduleName +assert opts.baseRevision +assert opts.artifactoryRepository +assert opts.type == "download" || opts.type == "upload" + +def outputDir = opts.outputDir +def pathToFbrZip = opts.pathToFbrZip +def fileSpecName = opts.fileSpecName +def org = opts.org +def moduleName = opts.moduleName +def baseRevision = opts.baseRevision +def artifactoryRepository = opts.artifactoryRepository +def type = opts.type + +def outDir = new File(outputDir) +outDir.mkdirs() +assert outDir.exists() +println "Creating Artifactory File Spec with the folllwing properties: " +println "\t outputDir: '${outputDir}'" +println "\t pathToFbrZip: '${pathToFbrZip}'" +println "\t fileSpecName: '${fileSpecName}'" +println "\t org: '${org}'" +println "\t moduleName: '${moduleName}'" +println "\t baseRevision: '${baseRevision}'" +println "\t artifactoryRepository: '${artifactoryRepository}'" +println "\t type: '${type}'" + +def fileList +def repoPath = "${artifactoryRepository}/${org}/${moduleName}/${baseRevision}/fbrs/${moduleName}-${baseRevision}.zip" +if( type == "upload" ) { + fileList = [ + [ pattern : { "${pathToFbrZip}" }, target : { "${repoPath}" }, props : { 'type=zip;status=ready' }, flat : { "true" } ] + ] +} else if ( type == "download") { + fileList = [ + [ pattern : { "${repoPath}" }, target : { "${outputDir}" }, props : { 'type=zip;status=ready' }, flat : { "true" } ] + ] +} + + + +jsonBuilder = new groovy.json.JsonBuilder() + +jsonBuilder { + files fileList.collect { + [ + pattern: it.pattern(), + target: it.target(), + props: it.props(), + flat: it.flat() + ] + } +} +println "Generated 
File Spec:" +println jsonBuilder.toPrettyString() + + +File fileSpecFile = new File(outDir, "${fileSpecName}") +fileSpecFile.write(jsonBuilder.toPrettyString()) +println "Successfully create file spec: '${fileSpecFile.getAbsolutePath()}'" + diff --git a/resources/setUpInitial.xslt b/resources/setUpInitial.xslt deleted file mode 100644 index a44caf8..0000000 --- a/resources/setUpInitial.xslt +++ /dev/null @@ -1,92 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - : - - - - - - - - - - - - - - false - true - true - - - - - - - - - - - - - - FlatFile - - true - - - - - - - - - - - - - - - - - - - - - - - - testServer - - - - - - - - diff --git a/resources/vs/CreateVarSub.groovy b/resources/vs/CreateVarSub.groovy new file mode 100755 index 0000000..ce585bd --- /dev/null +++ b/resources/vs/CreateVarSub.groovy @@ -0,0 +1,138 @@ +#!/opt/bpa/groovy-2.4.3/bin/groovy +package com.softwaerag.gcs.wx.bdas.projectAutomator + +import java.nio.file.Path + +import groovy.xml.* + +import groovy.xml.MarkupBuilder + +def cli = new CliBuilder (usage:'createdeployerproject.groovy -r REPOSITORY_PATH -f OUTPUTFILE -p PROJECT -t TARGETSUFFIX') +cli.with { + h longOpt:'help', 'Usage information' + a longOpt:'a',argName:'paTemplate', args:1, 'Path to project automator template file' + p longOpt:'project', argName:'project', args:1, 'Example: BDA_Automator_Project' + t longOpt:'target', argName:'target', args:1, 'Must have a matching entry in the ENV.groovy flie. Example: DEV.' 
+ vs longOpt:'varSubDirPath', argName:'varSubDirPath', args:1, 'Base dir with the varsub files' + o longOpt:'varSubOutputDirPath', argName:'varSubOutputDirPath', args:1, 'Output directory for varsub files' +} +def opts = cli.parse(args) +org.codehaus.groovy.ant.Groovy +if(!opts) return + if(opts.help) { + cli.usage() + return + } + + +assert opts +assert opts.a +assert opts.p +assert opts.t +assert opts.vs +assert opts.o + + +def paTemplate = opts.a +def project = opts.p +def target = opts.t +def varSubDirPath = opts.vs +def varSubOutputDirPath = opts.o + +println "Creating VarSub file with the following properties:" +println "\t- Project: '${project}'" +println "\t- Project Automator Template: '${paTemplate}'" +println "\t- Target: '${target}'" +println "\t- Varsub base dir: '${varSubDirPath}'" +println "\t- varSubOutputDirPath: '${varSubOutputDirPath}'" + +def currentDir = new File(".") +def varSubDir = new File(varSubDirPath) +def varSubEnvDir = new File(varSubDir, target) +assert varSubDir.exists() : "varSubDir for path '${varSubDirPath}' does not exist. Root dir: ${currentDir.getAbsolutePath()}" +if( !varSubEnvDir.exists()) { + println "varSubDir for environment '${target}' does not exist. Expected directory at: '${varSubDir.getAbsolutePath()}/${target}'. No variable substitution will be done..." 
+ return; +} +def varSubOutputDir = new File(varSubOutputDirPath) +if( !varSubOutputDir.exists() ) { + varSubOutputDir.mkdirs() +} +def paTemplateXml = new XmlSlurper().parse(new File(paTemplate)) + +def varSubEnvDirs = ["IS": new File(varSubEnvDir, "is"), "BPM": new File(varSubEnvDir, "bpm"), "MWS": new File(varSubEnvDir, "mws")] + +//Map targetsIs = { [:].withDefault{ owner.call() } }() +def deploymentMaps = [:] +def deploymentSets = [:] + +// parse project automator template and extract all composites +paTemplateXml.Projects.Project.MapSetMapping.findAll{true}.each { msm -> + String dsName = msm.@setName + String mapName = msm.@mapName + deploymentMaps[mapName] = [] + msm.alias.findAll{true}.each { alias -> + String aliasName = alias.text() + String aliasType = alias.@type + println "found alias '${aliasName}' of type '${aliasType}' for DeploymentSet '${dsName}'" + deploymentMaps[mapName].add([dsName: dsName, targetServerName: aliasName, targetServerType: aliasType]) + } +} + +paTemplateXml.Projects.Project.DeploymentSet.findAll {true}.each { ds -> + String dsName = ds.@name + deploymentSets[dsName] = [:] + ds.Composite.findAll{true}.each{ composite -> + String compositeName = composite.@name + String compositeType = composite.@type + String compositeDisplayName = composite.@displayName + String compositeSrcAlias = composite.@srcAlias + def s = deploymentSets[dsName] + s[compositeType] = s[compositeType] ? 
s[compositeType] : [] + s[compositeType].add([compositeName: compositeName, compositeDisplayName: compositeDisplayName, compositeSrcAlias: compositeSrcAlias]) + } +} + + +// now create a new varsub file for all composites and all environments +deploymentMaps.each { mapName, targetServers -> + println "- creating varsub for DeploymentMap ${mapName}" + File vsOutputFile = new File(varSubOutputDir, mapName.toString() + ".vs.xml") + PrintWriter vsOutputFileWriter = new PrintWriter(new BufferedWriter(new FileWriter(vsOutputFile))); + def varSubXml = new StreamingMarkupBuilder() + String vsxml = varSubXml.bind { + Root { + targetServers.each { targetServer -> + // println "- creating varsub for ds ${targetServer.dsName} and targetServer ${targetServer.targetServerName} of type ${targetServer.targetServerType}" + def deploymentSetName = targetServer.dsName + def targetServerName = targetServer.targetServerName + def targetServerType = targetServer.targetServerType + // println "deploymentSetName: ${deploymentSetName}, targetServerName: ${targetServerName}, targetServerType: ${targetServerType}" + + def deploymentSet = deploymentSets[deploymentSetName] + deploymentSet[targetServerType].each { composite -> + def assetCompositeName = composite.compositeName + def compositeDisplayName = composite.compositeDisplayName + def compositeSrcAlias = composite.compositeSrcAlias + // get varsub xml for this composite + def vsTemplateFile = new File(varSubEnvDirs[targetServerType], "${assetCompositeName}.vs.xml") + if( vsTemplateFile.exists() ) { + def p = new XmlSlurper().parse( vsTemplateFile ) + def deploymentSetNodeName = p.name() + "$deploymentSetNodeName"( + assetCompositeName: assetCompositeName, + deploymentSetName: deploymentSetName, + serverAliasName: compositeSrcAlias, + targetServerName: targetServerName, + targetServerType: targetServerType) { + mkp.yield p.children() + } + } + } + } + } + } + println XmlUtil.serialize( vsxml, vsOutputFileWriter ) + 
+ vsOutputFileWriter.close()
+ println "wrote varsub file ${vsOutputFile.getAbsolutePath()}"
+}
diff --git a/resources/vs/ExtractVarSub.groovy b/resources/vs/ExtractVarSub.groovy
new file mode 100755
index 0000000..9235cad
--- /dev/null
+++ b/resources/vs/ExtractVarSub.groovy
@@ -0,0 +1,89 @@
+#!/opt/bpa/groovy-2.4.3/bin/groovy
+// Extracts per-package variable-substitution ("varsub") templates from a varsub
+// file exported from webMethods Deployer: each IS DeploymentSet found in the
+// export is written to its own <assetName>.vs.xml template under
+// <varsubDirPath>/<target>/is, backing up any template that already exists there.
+// NOTE(review): 'softwaerag' in the package name looks like a typo for
+// 'softwareag' — confirm against the project layout before renaming (it would
+// change the expected source path).
+package com.softwaerag.gcs.wx.bdas.projectAutomator
+
+import java.nio.file.Path
+import groovy.xml.*
+
+import groovy.xml.MarkupBuilder
+
+// Command line: all three options are mandatory (enforced by the asserts below).
+def cli = new CliBuilder (usage:'ExtractVarSub.groovy -varsubFilePath varSubFilePath -varsubDirPath varSubTemplateDirPath -target targetEnvironment')
+cli.with {
+ h longOpt:'help', 'Usage information'
+ varsubFilePath longOpt:'varsubFilePath',argName:'varsubFilePath', args:1, 'Path to the varsub file exported from Deployer'
+ varsubDirPath longOpt:'varsubDirPath', argName:'varsubDirPath', args:1, 'Path to the dir with the varsub templates'
+ // NOTE(review): 'flie' in the help text below is a typo for 'file' (runtime
+ // string — deliberately left unchanged in this documentation pass).
+ target longOpt:'target', argName:'target', args:1, 'Must have a matching entry in the ENV.groovy flie. Example: DEV.'
+}
+def opts = cli.parse(args)
+// NOTE(review): the next line is a bare class reference — it evaluates
+// org.codehaus.groovy.ant.Groovy and discards the result (effectively a no-op);
+// presumably a leftover. Confirm before deleting.
+org.codehaus.groovy.ant.Groovy
+if(!opts) return
+ if(opts.help) {
+ cli.usage()
+ return
+ }
+
+// Fail fast when a mandatory option is missing.
+assert opts
+assert opts.varsubFilePath
+assert opts.varsubDirPath
+assert opts.target
+
+
+def varsubFilePath = opts.varsubFilePath
+def varsubDirPath = opts.varsubDirPath
+def target = opts.target
+
+println "Creating VarSub file with the following properties: "
+println "\t- Varsub file exported from Deployer: '${varsubFilePath}'"
+println "\t- Directory where to store varsub template files: '${varsubDirPath}'"
+println "\t- Target environment: '${target}'"
+
+// Create the per-environment template directory <varsubDirPath>/<target> on demand.
+def currentDir = new File(".")
+def varSubDir = new File(varsubDirPath)
+def varSubEnvDir = new File(varSubDir, target)
+if( !varSubEnvDir.exists() ) {
+ varSubEnvDir.mkdirs()
+ println "Created varsub directory '${varSubEnvDir.getAbsolutePath()}' for target environment '${target}'"
+}
+// varsub xml files for IntegrationServer packages must be stored in a sub folder called "is"
+def varSubEnvISDir = new File(varSubEnvDir, "is")
+// varsub xml files for MWS assets must be stored in a sub folder called "mws"
+def varSubEnvMWSDir = new File(varSubEnvDir, "mws")
+
+// get the varsub file which was exported from Deployer
+def varsubFile = new File(varsubFilePath)
+assert varsubFile.exists() : "Varsubfile '${varsubFile}' does not exist"
+def varsubFileXml = new XmlSlurper().parse(varsubFile)
+
+// parse the varsub file and get all IS composites
+// NOTE(review): the misspelled element name 'DepoeymentSet' is matched on purpose
+// alongside 'DeploymentSet' — presumably to tolerate a typo present in some
+// Deployer exports; verify against real export files before removing it.
+varsubFileXml.'**'.findAll{ (it.name() == 'DeploymentSet' || it.name() == 'DepoeymentSet') && it.@targetServerType == 'IS'}.each { ISDeploementSet ->
+//varsubFileXml.'**'.findAll{ it.name() == 'DeploymentSet' && it.@targetServerType == 'IS'}.each { ISDeploementSet ->
+ def assetName = "" + ISDeploementSet.@assetCompositeName
+ def deploymentSetName = ISDeploementSet.name()
+ println "Found varsub for IS package '${assetName}'"
+ // create a new XML with a StreamingMarkupBuilder, which will store the varsub for this IS package
+ def varSubXml
= new StreamingMarkupBuilder() + // give the new varsub for this IS package the standard name ${assetName}.vs.xml + def outputFile = new File(varSubEnvISDir, assetName + ".vs.xml") + // check if a varsub file already exists for this package. If so, create a numbered backup + if( outputFile.exists() ) { + def backupFile = new File(varSubEnvISDir, assetName + ".vs.xml.bak") + def i=0 + while( backupFile.exists() ) { + backupFile = new File(varSubEnvISDir, assetName + ".vs.xml.bak" + ++i) + } + backupFile << outputFile.text + println "created backup '${backupFile.getAbsolutePath()}' of existing varsub file" + } + PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(outputFile))); + // create the varsub xml using the StreamingMarkupBuilder + String nxml = varSubXml.bind { + "$deploymentSetName" { + // add the string representation of the current deployment set to the varsub file + // note: "ISDeploementSet.children().toString()" returns a valid xml string + mkp.yield ISDeploementSet.children() + } + } + // write the xml to file + println XmlUtil.serialize( nxml, writer ) + writer.close() + println "done creating varsub template '${outputFile.getAbsolutePath()}'" +} diff --git a/src/com/softwareag/wx/bda/utils/ant/BDALogger.java b/src/com/softwareag/wx/bda/utils/ant/BDALogger.java new file mode 100755 index 0000000..4ddd20a --- /dev/null +++ b/src/com/softwareag/wx/bda/utils/ant/BDALogger.java @@ -0,0 +1,133 @@ +package com.softwareag.wx.bda.utils.ant; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Calendar; +import java.util.Date; +import java.util.Properties; + +import org.apache.log4j.Logger; +import org.apache.log4j.Priority; +import org.apache.log4j.PropertyConfigurator; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.apache.tools.ant.Task; + +import com.softwareag.wx.bda.utils.ant.BDALogger; + +public 
class BDALogger extends Task {
+	// Custom Ant task: writes a message BOTH to a log4j logger named
+	// "TestAutomation" and to Ant's own build log, at a severity selected via
+	// the 'level' attribute ("info", "error", "trace"; case-insensitive).
+	// log4j is configured lazily, once per JVM, in init().
+
+	// Smoke-test entry point: logs "test" at INFO outside of an Ant build.
+	public static void main(String args[]) {
+		BDALogger t = new BDALogger();
+		t.setMsg("test");
+		t.setLevel("INFO");
+		t.execute();
+	}
+
+	// Shared across all task instances so log4j is configured only once per JVM.
+	private static Properties logProperties = null;
+
+	// Configures log4j on first use, trying in order:
+	//   1. the file named by the Ant property 'bda.logging.log4j.propertiesFile',
+	//   2. resources/log4j.properties, 3. log4j.properties,
+	// and finally falling back to hard-coded defaults (a DailyRollingFileAppender
+	// writing to logs/testautomation.log).
+	public void init() {
+		boolean propertiesLoaded = false;
+		StringBuffer logString = new StringBuffer();
+		if (logProperties == null) {
+
+			String logPropertiesPath = getProject().getProperty(
+					"bda.logging.log4j.propertiesFile");
+			File logPropertiesFile = null;
+			if (logPropertiesPath == null || "".equals(logPropertiesPath)) {
+				logPropertiesPath = "resources/log4j.properties";
+				logString.append("BDA: Property 'bda.logging.log4j.propertiesFile' is not set. Trying to load log4j properties file from '"
+						+ logPropertiesPath + "'.");
+				logPropertiesFile = new File(logPropertiesPath);
+				// NOTE(review): 'logProperties == null' is always true at this point (we
+				// are still inside the outer 'logProperties == null' block), so this
+				// condition always holds and the path is always switched to
+				// 'log4j.properties' even when resources/log4j.properties exists —
+				// probably only '!logPropertiesFile.exists()' was intended. Confirm.
+				if (logProperties == null || !logPropertiesFile.exists()) {
+					logPropertiesPath = "log4j.properties";
+					logString.append("BDA: Trying to load log4j properties file from '"
+							+ logPropertiesPath + "'.");
+					logPropertiesFile = new File(logPropertiesPath);
+				}
+			} else {
+				logString.append("BDA: Trying to loading log4j properties file as defined in the properties 'bda.logging.log4j.propertiesFile' from '"
+						+ logPropertiesPath + "'.");
+				logPropertiesFile = new File(logPropertiesPath);
+			}
+			logProperties = new Properties();
+			try {
+				logProperties.load(new FileInputStream(logPropertiesFile));
+				propertiesLoaded = true;
+			} catch (FileNotFoundException fnfe) {
+				log("BDA: log4j properties file '" + logPropertiesPath
+						+ "' for test automation could not be found: " + fnfe,
+						Project.MSG_ERR);
+			} catch (IOException ioe) {
+				// NOTE(review): 'occured' in the message below is a typo for
+				// 'occurred' (runtime string — left unchanged in this doc pass).
+				log("BDA: Exception occured while loading log4j properties file '"
+						+ logPropertiesPath + "' for test automation: " + ioe,
+						Project.MSG_ERR);
+			}
+			// No properties file could be read: fall back to hard-coded defaults.
+			if (propertiesLoaded == false) {
+				log(logString.toString(),
+						Project.MSG_VERBOSE);
+				logProperties.setProperty("log4j.logger.TestAutomation",
+						"INFO, myFileAppender");
+				logProperties.setProperty("log4j.appender.myFileAppender",
+						"org.apache.log4j.DailyRollingFileAppender");
+				logProperties.setProperty(
+						"log4j.appender.myFileAppender.datePattern",
+						"'.'yyyyMMdd");
+				logProperties.setProperty("log4j.appender.myFileAppender.File",
+						"logs/testautomation.log");
+				logProperties.setProperty(
+						"log4j.appender.myFileAppender.layout",
+						"org.apache.log4j.PatternLayout");
+				logProperties
+						.setProperty(
+								"log4j.appender.myFileAppender.layout.ConversionPattern",
+								"%d{ISO8601} %p [%c] - %m%n");
+			}
+			PropertyConfigurator.configure(logProperties);
+		}
+	}
+
+	private static Logger l = Logger.getLogger("TestAutomation");
+
+	public String getMsg() {
+		return msg;
+	}
+
+	public void setMsg(String msg) {
+		this.msg = msg;
+	}
+
+	public String getLevel() {
+		return level;
+	}
+
+	public void setLevel(String level) {
+		this.level = level;
+	}
+
+	// The message to log; execute() prefixes it with "[<project>:<target>] ".
+	private String msg;
+	// Severity name; unrecognized values fall through to the default branch
+	// in execute(). Defaults to "INFO".
+	private String level = "INFO";
+
+	// Logs the message at the configured level through both log4j and Ant.
+	// NOTE(review): the final else branch logs via l.trace() but reports
+	// Project.MSG_INFO to Ant — confirm that asymmetry is intentional.
+	public void execute() throws BuildException {
+		init();
+		Project p = this.getProject();
+		// Prefix the message with its origin: "[<project>:<target>] ".
+		this.msg = "[" + p.getName() + ":" + this.getOwningTarget() + "] "
+				+ msg;
+		if (this.level.toLowerCase().equals("info")) {
+			l.info(this.msg);
+			log(this.msg, Project.MSG_INFO);
+		} else if (this.level.toLowerCase().equals("error")) {
+			l.error(this.msg);
+			log(this.msg, Project.MSG_ERR);
+		} else if (this.level.toLowerCase().equals("trace")) {
+			l.trace(this.msg);
+			log(this.msg, Project.MSG_VERBOSE);
+		} else {
+			l.trace(this.msg);
+			log(this.msg, Project.MSG_INFO);
+		}
+	}
+
+}
\ No newline at end of file
diff --git a/src/com/softwareag/wx/bda/utils/ant/bdaAntlib.xml b/src/com/softwareag/wx/bda/utils/ant/bdaAntlib.xml
new file mode 100755
index 0000000..e0717f9
--- /dev/null
+++ b/src/com/softwareag/wx/bda/utils/ant/bdaAntlib.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file